summaryrefslogtreecommitdiff
path: root/Utility/Url.hs
blob: 945c930ee95f51bb2e74ef43f7c1d88f4d1e6781 (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
{- Url downloading.
 -
 - Copyright 2011-2020 Joey Hess <id@joeyh.name>
 -
 - License: BSD-2-clause
 -}

{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE CPP #-}

module Utility.Url (
	-- * Options and configuration
	newManager,
	URLString,
	UserAgent,
	Scheme,
	mkScheme,
	allowedScheme,
	UrlDownloader(..),
	NonHttpUrlDownloader(..),
	UrlOptions(..),
	defUrlOptions,
	mkUrlOptions,
	-- * Checking urls
	check,
	checkBoth,
	exists,
	UrlInfo(..),
	getUrlInfo,
	assumeUrlExists,
	-- * Downloading
	download,
	downloadQuiet,
	downloadConduit,
	sinkResponseFile,
	downloadPartial,
	-- * Utilities
	parseURIRelaxed,
	matchStatusCodeException,
	matchHttpExceptionContent,
	BasicAuth(..),
	GetBasicAuth,
	noBasicAuth,
	applyBasicAuth',
	extractFromResourceT,
) where

import Common
import Utility.Metered
#ifdef WITH_HTTP_CLIENT_RESTRICTED
import Network.HTTP.Client.Restricted
#else
import Utility.HttpManagerRestricted
#endif
import Utility.IPAddress

import Network.URI
import Network.HTTP.Types
import qualified Network.Connection as NC
import qualified Data.CaseInsensitive as CI
import qualified Data.ByteString as B
import qualified Data.ByteString.UTF8 as B8
import qualified Data.ByteString.Lazy as L
import qualified Data.Set as S
import Control.Exception (throwIO, evaluate)
import Control.Monad.Trans.Resource
import Control.Monad.IO.Class (MonadIO)
import Control.DeepSeq
import Network.HTTP.Conduit
import Network.HTTP.Client
import Network.HTTP.Simple (getResponseHeader)
import Network.Socket
import Network.BSD (getProtocolNumber)
import Data.Either
import Data.Conduit
import Text.Read
import System.Log.Logger

-- | An url, represented as a String.
type URLString = String

-- | Request headers, each in "Name: value" form.
type Headers = [String]

-- | A http User-Agent string.
type UserAgent = String

-- | An url scheme (eg "http"), compared case-insensitively.
newtype Scheme = Scheme (CI.CI String)
	deriving (Eq, Ord)

mkScheme :: String -> Scheme
mkScheme = Scheme . CI.mk

-- | The scheme in its original letter case.
fromScheme :: Scheme -> String
fromScheme (Scheme s) = CI.original s

-- | Options controlling how urls are accessed and downloaded.
data UrlOptions = UrlOptions
	{ userAgent :: Maybe UserAgent
	-- ^ user agent to send with requests
	, reqHeaders :: Headers
	-- ^ extra headers to send with requests
	, urlDownloader :: UrlDownloader
	-- ^ how downloads are performed
	, applyRequest :: Request -> Request
	-- ^ adjusts each http Request before it is made
	, httpManager :: Manager
	-- ^ connection manager used for http requests
	, allowedSchemes :: S.Set Scheme
	-- ^ only urls with these schemes may be accessed
	, getBasicAuth :: GetBasicAuth
	-- ^ supplies basic auth credentials for an url
	}

-- | How to download urls.
data UrlDownloader
	= DownloadWithConduit NonHttpUrlDownloader
	-- ^ use http-conduit, with a fallback for urls it cannot handle
	| DownloadWithCurl [CommandParam]
	-- ^ run curl with these additional parameters

-- | Downloader used for urls http-conduit does not support (eg ftp).
data NonHttpUrlDownloader
	= DownloadWithCurlRestricted Restriction
	-- ^ curl, with network access limited by the Restriction

-- | Default options: no user agent or extra headers, conduit-based
-- downloading with an unrestricted curl fallback, a fresh tls-capable
-- Manager, http/https/ftp allowed, and no basic auth.
defUrlOptions :: IO UrlOptions
defUrlOptions = do
	manager <- newManager tlsManagerSettings
	return $ UrlOptions
		{ userAgent = Nothing
		, reqHeaders = []
		, urlDownloader = DownloadWithConduit (DownloadWithCurlRestricted mempty)
		, applyRequest = id
		, httpManager = manager
		, allowedSchemes = S.fromList (map mkScheme ["http", "https", "ftp"])
		, getBasicAuth = noBasicAuth
		}

-- | Builds UrlOptions from component parts.
--
-- A User-Agent set via the request headers takes precedence over the
-- default user agent. All headers (and the user agent) are added to each
-- Request by the resulting applyRequest.
mkUrlOptions :: Maybe UserAgent -> Headers -> UrlDownloader -> Manager -> S.Set Scheme -> GetBasicAuth -> UrlOptions
mkUrlOptions defuseragent reqheaders urldownloader manager getbasicauth =
	UrlOptions useragent reqheaders urldownloader applyrequest manager getbasicauth
  where
	applyrequest = \r -> r { requestHeaders = requestHeaders r ++ addedheaders }
	addedheaders = uaheader ++ otherheaders
	-- The first User-Agent header given in reqheaders wins over the
	-- default user agent.
	useragent = maybe defuseragent (Just . B8.toString . snd)
		(headMaybe uafromheaders)
	uaheader = case useragent of
		Nothing -> []
		Just ua -> [(hUserAgent, B8.fromString ua)]
	(uafromheaders, otherheaders) = partition (\(h, _) -> h == hUserAgent)
		(map toheader reqheaders)
	-- Parses "Name: value" into a header pair, stripping a single
	-- space after the colon when present.
	toheader s =
		let (h, v) = separate (== ':') s
		    h' = CI.mk (B8.fromString h)
		in case v of
			(' ':v') -> (h', B8.fromString v')
			_ -> (h', B8.fromString v)

-- | Builds a full curl command line: the caller's parameters followed by
-- options derived from the UrlOptions (user agent, request headers, any
-- extra curl parameters, and the allowed protocols).
curlParams :: UrlOptions -> [CommandParam] -> [CommandParam]
curlParams uo baseparams = concat
	[ baseparams
	, uaopts
	, headeropts
	, extraopts
	, protoopts
	]
  where
	uaopts = maybe [] (\ua -> [Param "--user-agent", Param ua]) (userAgent uo)
	headeropts = concatMap (\h -> [Param "-H", Param h]) (reqHeaders uo)
	extraopts = case urlDownloader uo of
		DownloadWithCurl l -> l
		DownloadWithConduit _ -> []
	-- "-all" first disables every protocol, then each allowed
	-- scheme is enabled.
	protoopts =
		[ Param "--proto"
		, Param (intercalate "," ("-all" : map fromScheme (S.toList (allowedSchemes uo))))
		]

-- | Runs the action only when the UrlOptions allow accessing the url's
-- scheme; otherwise returns a Left explaining the refusal.
checkPolicy :: UrlOptions -> URI -> IO (Either String a) -> IO (Either String a)
checkPolicy uo u a = if allowedScheme uo u
	then a
	else return (Left ("Configuration does not allow accessing " ++ show u))

-- | Error message for an url whose scheme cannot be handled.
unsupportedUrlScheme :: URI -> String
unsupportedUrlScheme u = concat ["Unsupported url scheme ", show u]

-- | Checks whether the url's scheme is one of the allowed schemes.
allowedScheme :: UrlOptions -> URI -> Bool
allowedScheme uo u = S.member scheme (allowedSchemes uo)
  where
	-- uriScheme includes the trailing ':', which is stripped here.
	scheme = mkScheme (takeWhile (/= ':') (uriScheme u))

{- Checks that an url exists and could be successfully downloaded,
 - also checking that its size, if available, matches a specified size.
 -
 - The Left error is returned if policy or the restricted http manager
 - does not allow accessing the url or the url scheme is not supported.
 -}
checkBoth :: URLString -> Maybe Integer -> UrlOptions -> IO (Either String Bool)
checkBoth url expected_size uo =
	fmap (uncurry (&&)) <$> check url expected_size uo

{- Checks whether an url exists, and whether its size (when the server
 - reports one) matches the expected size. -}
check :: URLString -> Maybe Integer -> UrlOptions -> IO (Either String (Bool, Bool))
check url expected_size uo = fmap go <$> getUrlInfo url uo
  where
	go (UrlInfo False _ _) = (False, False)
	go (UrlInfo True reportedsz _) = (True, sizeok reportedsz)
	-- When the server reports no size, or no size was expected,
	-- the size check passes.
	sizeok Nothing = True
	sizeok reportedsz = case expected_size of
		Just _ -> expected_size == reportedsz
		Nothing -> True

-- | Checks only whether the url exists.
exists :: URLString -> UrlOptions -> IO (Either String Bool)
exists url uo = do
	r <- getUrlInfo url uo
	return (fmap urlExists r)

-- | Information learned about an url.
data UrlInfo = UrlInfo
	{ urlExists :: Bool
	-- ^ whether the url exists
	, urlSize :: Maybe Integer
	-- ^ its size, when known
	, urlSuggestedFile :: Maybe FilePath
	-- ^ filename suggested by the Content-Disposition header, if any
	}
	deriving (Show)

-- | An UrlInfo for an url assumed to exist, with unknown size and no
-- suggested filename.
assumeUrlExists :: UrlInfo
assumeUrlExists = UrlInfo
	{ urlExists = True
	, urlSize = Nothing
	, urlSuggestedFile = Nothing
	}

{- Checks that an url exists and could be successfully downloaded,
 - also returning its size and suggested filename if available.
 -
 - The Left error is returned if policy or the restricted http manager
 - does not allow accessing the url or the url scheme is not supported.
 -}
getUrlInfo :: URLString -> UrlOptions -> IO (Either String UrlInfo)
getUrlInfo url uo = case parseURIRelaxed url of
	Just u -> checkPolicy uo u (go u)
	-- An unparseable url is reported as not existing.
	Nothing -> return (Right dne)
   where
	go :: URI -> IO (Either String UrlInfo)
	go u = case (urlDownloader uo, parseRequest (show u)) of
		(DownloadWithConduit (DownloadWithCurlRestricted r), Just req) ->
			existsconduit r req
		(DownloadWithConduit (DownloadWithCurlRestricted r), Nothing)
			| isfileurl u -> Right <$> existsfile u
			-- http-conduit does not support ftp, so restricted
			-- curl is used for it.
			| isftpurl u -> (Right <$> existscurlrestricted r u url ftpport)
				`catchNonAsync` (const $ return $ Right dne)
			| otherwise -> return $ Left $ unsupportedUrlScheme u
		(DownloadWithCurl _, _) 
			| isfileurl u -> Right <$> existsfile u
			| otherwise -> Right <$> existscurl u (basecurlparams url)
	
	-- Result used when the url does not exist.
	dne = UrlInfo False Nothing Nothing
	found sz f = return $ UrlInfo True sz f

	isfileurl u = uriScheme u == "file:"
	isftpurl u = uriScheme u == "ftp:"

	ftpport = 21

	-- HEAD request via curl; "-w %{http_code}" makes the http status
	-- code be the last line of output.
	basecurlparams u = curlParams uo $
		[ Param "-s"
		, Param "--head"
		, Param "-L", Param u
		, Param "-w", Param "%{http_code}"
		]

	-- Extracts the Content-Length from curl's dumped headers; the
	-- last such header wins when redirects produced several.
	extractlencurl s = case lastMaybe $ filter ("Content-Length:" `isPrefixOf`) (lines s) of
		Just l -> case lastMaybe $ words l of
			Just sz -> readish sz
			_ -> Nothing
		_ -> Nothing
	
	extractlen = readish . B8.toString
		<=< lookup hContentLength . responseHeaders

	extractfilename = contentDispositionFilename . B8.toString
		<=< lookup hContentDisposition . responseHeaders

	existsconduit r req =
		let a = catchcrossprotoredir r (existsconduit' req uo)
		in catchJust matchconnectionrestricted a retconnectionrestricted
	
	-- Matches the exception thrown when the restricted http manager
	-- refused to connect.
	matchconnectionrestricted he@(HttpExceptionRequest _ (InternalException ie)) =
		case fromException ie of
			Just (ConnectionRestricted _why) -> Just he
			_ -> Nothing
	matchconnectionrestricted _ = Nothing

	-- Turns a connection-restricted exception into a Left with its
	-- explanation; rethrows anything else.
	retconnectionrestricted he@(HttpExceptionRequest _ (InternalException ie)) =
		case fromException ie of
			Just (ConnectionRestricted why) -> return (Left why)
			_ -> throwM he
	retconnectionrestricted he = throwM he

	-- HEAD request via conduit. On a 401 response, asks for basic
	-- auth credentials and retries once, with getBasicAuth disabled
	-- on the retry to avoid looping.
	existsconduit' req uo' = do
		let req' = headRequest (applyRequest uo req)
		debugM "url" (show req')
		join $ runResourceT $ do
			resp <- http req' (httpManager uo)
			if responseStatus resp == ok200
				then do
					len <- extractFromResourceT (extractlen resp)
					fn <- extractFromResourceT (extractfilename resp)
					return $ found len fn
				else if responseStatus resp == unauthorized401
					then return $ getBasicAuth uo' (show (getUri req)) >>= \case
						Nothing -> return dne
						Just (ba, signalsuccess) -> do
							ui <- existsconduit'
								(applyBasicAuth' ba req)
								(uo' { getBasicAuth = noBasicAuth })
							signalsuccess (urlExists ui)
							return ui
					else return $ return dne

	existscurl u curlparams = do
		output <- catchDefaultIO "" $
			readProcess "curl" $ toCommand curlparams
		let len = extractlencurl output
		let good = found len Nothing
		let isftp = or
			[ "ftp" `isInfixOf` uriScheme u
			-- Check to see if http redirected to ftp.
			, "Location: ftp://" `isInfixOf` output
			]
		case lastMaybe (lines output) of
			-- Any 2xx status code counts as success.
			Just ('2':_:_) -> good
			-- don't try to parse ftp status codes; if curl
			-- got a length, it's good
			_ | isftp && isJust len -> good
			_ -> return dne
	
	existscurlrestricted r u url' defport = existscurl u 
		=<< curlRestrictedParams r u defport (basecurlparams url')

	-- file: urls are checked by statting the local file.
	existsfile u = do
		let f = unEscapeString (uriPath u)
		s <- catchMaybeIO $ getFileStatus f
		case s of
			Just stat -> do
				sz <- getFileSize' f stat
				found (Just sz) Nothing
			Nothing -> return dne

	-- When http server redirects to a protocol which conduit does not
	-- support, it will throw a StatusCodeException with found302
	-- and a Response with the redir Location.
	catchcrossprotoredir r a = 
		catchJust (matchStatusCodeException (== found302))
			(Right <$> a)
			(followredir r)
	
	followredir r (HttpExceptionRequest _ (StatusCodeException resp _)) = 
		case headMaybe $ map decodeBS $ getResponseHeader hLocation resp of
			Just url' -> case parseURIRelaxed url' of
				-- only follow http to ftp redirects;
				-- http to file redirect would not be secure,
				-- and http-conduit follows http to http.
				Just u' | isftpurl u' ->
					checkPolicy uo u' $ Right <$> 
						existscurlrestricted r u' url' ftpport
				_ -> return (Right dne)
			Nothing -> return (Right dne)
	followredir _ _ = return (Right dne)

-- Parse eg: attachment; filename="fname.ext"
-- per RFC 2616
contentDispositionFilename :: String -> Maybe FilePath
contentDispositionFilename s
	| hdrprefix `isPrefixOf` s && "\"" `isSuffixOf` s =
		-- Everything between the opening and closing quotes.
		Just (dropFromEnd 1 (drop (length hdrprefix) s))
	| otherwise = Nothing
  where
	hdrprefix = "attachment; filename=\""

-- | Converts a Request to a HEAD request, also replacing the default
-- Accept-Encoding header so the server reports the actual size of the
-- content, not the gzip compressed size.
headRequest :: Request -> Request
headRequest r = r
	{ method = methodHead
	, requestHeaders = (hAcceptEncoding, B.empty) : keptheaders
	}
  where
	keptheaders = filter (\(h, _) -> h /= hAcceptEncoding) (requestHeaders r)

{- Download a perhaps large file, with auto-resume of incomplete downloads.
 -
 - When the download fails, returns an error message.
 -}
download :: MeterUpdate -> URLString -> FilePath -> UrlOptions -> IO (Either String ())
download meterupdate url file uo = download' False meterupdate url file uo

{- Avoids displaying any error message, including silencing curl errors. -}
downloadQuiet :: MeterUpdate -> URLString -> FilePath -> UrlOptions -> IO Bool
downloadQuiet meterupdate url file uo = do
	r <- download' True meterupdate url file uo
	return (isRight r)

-- | Shared implementation of download and downloadQuiet.
--
-- All http exceptions (and any other non-async exception) are turned
-- into a Left error message.
download' :: Bool -> MeterUpdate -> URLString -> FilePath -> UrlOptions -> IO (Either String ())
download' nocurlerror meterupdate url file uo =
	catchJust matchHttpException go showhttpexception
		`catchNonAsync` (dlfailed . show)
  where
	go = case parseURIRelaxed url of
		Just u -> checkPolicy uo u $
			case (urlDownloader uo, parseRequest (show u)) of
				(DownloadWithConduit (DownloadWithCurlRestricted r), Just req) -> catchJust
					(matchStatusCodeException (== found302))
					(downloadConduit meterupdate req file uo >> return (Right ()))
					(followredir r)
				(DownloadWithConduit (DownloadWithCurlRestricted r), Nothing)
					| isfileurl u -> downloadfile u
					-- conduit does not support ftp; use
					-- restricted curl for it
					| isftpurl u -> downloadcurlrestricted r u url ftpport
					| otherwise -> dlfailed $ unsupportedUrlScheme u
				(DownloadWithCurl _, _)
					| isfileurl u -> downloadfile u
					| otherwise -> downloadcurl url basecurlparams
		Nothing -> do
			liftIO $ debugM "url" url
			dlfailed "invalid url"
	
	isfileurl u = uriScheme u == "file:"
	isftpurl u = uriScheme u == "ftp:"

	ftpport = 21

	-- Converts a http exception into a human-readable failure message.
	showhttpexception he = dlfailed $ case he of
		HttpExceptionRequest _ (StatusCodeException r _) ->
			B8.toString $ statusMessage $ responseStatus r
		HttpExceptionRequest _ (InternalException ie) -> 
			case fromException ie of
				Nothing -> show ie
				Just (ConnectionRestricted why) -> why
		HttpExceptionRequest _ other -> show other
		_ -> show he
	
	dlfailed msg = return $ Left $ "download failed: " ++ msg

	-- -C - makes curl resume a previously interrupted download.
	-- NOTE(review): with nocurlerror True this passes "-S" (show
	-- errors) and drops "-s" (silent); "-s" alone would be expected
	-- for suppressing curl errors — verify this is intended.
	basecurlparams = curlParams uo
		[ if nocurlerror
			then Param "-S"
			else Param "-sS"
		, Param "-f"
		, Param "-L"
		, Param "-C", Param "-"
		]

	downloadcurl rawurl curlparams = do
		-- curl does not create destination file
		-- if the url happens to be empty, so pre-create.
		unlessM (doesFileExist file) $
			writeFile file ""
		ifM (boolSystem "curl" (curlparams ++ [Param "-o", File file, File rawurl]))
			( return $ Right ()
			, return $ Left "download failed"
			)

	downloadcurlrestricted r u rawurl defport =
		downloadcurl rawurl =<< curlRestrictedParams r u defport basecurlparams

	-- file: urls are copied locally, updating the meter as data
	-- is read.
	downloadfile u = do
		let src = unEscapeString (uriPath u)
		withMeteredFile src meterupdate $
			L.writeFile file
		return $ Right ()

	-- Conduit does not support ftp, so will throw an exception on a
	-- redirect to a ftp url; fall back to curl.
	followredir r ex@(HttpExceptionRequest _ (StatusCodeException resp _)) = 
		case headMaybe $ map decodeBS $ getResponseHeader hLocation resp of
			Just url' -> case parseURIRelaxed url' of
				Just u' | isftpurl u' ->
					checkPolicy uo u' $
						downloadcurlrestricted r u' url' ftpport
				_ -> throwIO ex
			Nothing -> throwIO ex
	followredir _ ex = throwIO ex

{- Download a perhaps large file using conduit, with auto-resume
 - of incomplete downloads.
 -
 - Does not catch exceptions.
 -}
downloadConduit :: MeterUpdate -> Request -> FilePath -> UrlOptions -> IO ()
downloadConduit meterupdate req file uo =
	catchMaybeIO (getFileSize file) >>= \case
		-- A partially downloaded file already exists; try to resume.
		Just sz | sz > 0 -> resumedownload sz
		_ -> join $ runResourceT $ do
			liftIO $ debugM "url" (show req')
			resp <- http req' (httpManager uo)
			if responseStatus resp == ok200
				then do
					store zeroBytesProcessed WriteMode resp
					return (return ())
				else do
					rf <- extractFromResourceT (respfailure resp)
					if responseStatus resp == unauthorized401
						then return $ getBasicAuth uo (show (getUri req')) >>= \case
							Nothing -> giveup rf
							Just ba -> retryauthed ba
						else return $ giveup rf
  where
	req' = applyRequest uo $ req
		-- Override http-client's default decompression of gzip
		-- compressed files. We want the unmodified file content.
		{ requestHeaders = (hAcceptEncoding, "identity") :
			filter ((/= hAcceptEncoding) . fst)
				(requestHeaders req)
		, decompress = const False
		}

	-- Resume download from where a previous download was interrupted, 
	-- when supported by the http server. The server may also opt to
	-- send the whole file rather than resuming.
	resumedownload sz = catchJust
		(matchStatusCodeHeadersException (alreadydownloaded sz))
		dl
		(const noop)
	  where
		dl = join $ runResourceT $ do
			-- Build on req', not req, so the resumed request
			-- keeps the Accept-Encoding identity override and
			-- any headers added by applyRequest. (Using the
			-- unmodified req here would let the server send
			-- gzip compressed content, corrupting the partial
			-- file when appended.)
			let req'' = req' { requestHeaders = resumeFromHeader sz : requestHeaders req' }
			liftIO $ debugM "url" (show req'')
			resp <- http req'' (httpManager uo)
			if responseStatus resp == partialContent206
				then do
					store (toBytesProcessed sz) AppendMode resp
					return (return ())
				else if responseStatus resp == ok200
					-- Server sent the whole file; start over.
					then do
						store zeroBytesProcessed WriteMode resp
						return (return ())
					else do
						rf <- extractFromResourceT (respfailure resp)
						if responseStatus resp == unauthorized401
							then return $ getBasicAuth uo (show (getUri req'')) >>= \case
								Nothing -> giveup rf
								Just ba -> retryauthed ba
							else return $ giveup rf
	
	-- A 416 response to a resume request means the file was already
	-- fully downloaded, when the Content-Range confirms it (or when
	-- the header is missing, which some servers do).
	alreadydownloaded sz s h = s == requestedRangeNotSatisfiable416 
		&& case lookup hContentRange h of
			-- This could be improved by fixing
			-- https://github.com/aristidb/http-types/issues/87
			Just crh -> crh == B8.fromString ("bytes */" ++ show sz)
			-- Some http servers send no Content-Range header when
			-- the range extends beyond the end of the file.
			-- There is no way to distinguish between the file
			-- being the same size on the http server, vs
			-- it being shorter than the file we already have.
			-- So assume we have the whole content of the file
			-- already, the same as wget and curl do.
			Nothing -> True
	
	store initialp mode resp =
		sinkResponseFile meterupdate initialp file mode resp
	
	respfailure = B8.toString . statusMessage . responseStatus
	
	-- Retries once with the provided credentials, signaling whether
	-- they worked, with basic auth disabled on the retry.
	retryauthed (ba, signalsuccess) = do
		r <- tryNonAsync $ downloadConduit
			meterupdate
			(applyBasicAuth' ba req)
			file
			(uo { getBasicAuth = noBasicAuth })
		case r of
			Right () -> signalsuccess True
			Left e -> do
				() <- signalsuccess False
				throwM e
	
{- Sinks a Response's body to a file. The file can either be opened in
 - WriteMode or AppendMode. Updates the meter as data is received.
 -
 - Note that the responseStatus is not checked by this function.
 -}
sinkResponseFile
	:: MonadResource m
	=> MeterUpdate
	-> BytesProcessed -- ^ initial byte count (nonzero when appending)
	-> FilePath
	-> IOMode
	-> Response (ConduitM () B8.ByteString m ())
	-> m ()
sinkResponseFile meterupdate initialp file mode resp = do
	-- allocate/release rather than bracket, since this runs in
	-- MonadResource.
	(fr, fh) <- allocate (openBinaryFile file mode) hClose
	runConduit $ responseBody resp .| go initialp fh
	release fr
  where
	-- Writes each received chunk, updating the meter with the
	-- running total of bytes.
	go sofar fh = await >>= \case
		Nothing -> return ()
		Just bs -> do
			let sofar' = addBytesProcessed sofar (B.length bs)
			liftIO $ do
				void $ meterupdate sofar'
				B.hPut fh bs
			go sofar' fh

{- Downloads at least the specified number of bytes from an url. -}
downloadPartial :: URLString -> UrlOptions -> Int -> IO (Maybe L.ByteString)
downloadPartial url uo n = maybe (return Nothing) dl (parseURIRelaxed url)
  where
	-- Any non-async exception results in Nothing.
	dl u = fetch u `catchNonAsync` \_ -> return Nothing
	fetch u = case parseRequest (show u) of
		Nothing -> return Nothing
		Just req -> do
			let req' = applyRequest uo req
			liftIO $ debugM "url" (show req')
			withResponse req' (httpManager uo) $ \resp ->
				if responseStatus resp == ok200
					then Just <$> brReadSome (responseBody resp) n
					else return Nothing

{- Allows for spaces and other stuff in urls, properly escaping them. -}
parseURIRelaxed :: URLString -> Maybe URI
parseURIRelaxed s = case parseURI (escapeURIString isAllowedInURI s) of
	Just u -> Just u
	-- Fall back to the more expensive per-character escaping.
	Nothing -> parseURIRelaxed' s

{- Some characters like '[' are allowed in eg, the address of
 - an uri, but cannot appear unescaped further along in the uri.
 - This handles that, expensively, by successively escaping each character
 - from the back of the url until the url parses.
 -}
parseURIRelaxed' :: URLString -> Maybe URI
parseURIRelaxed' s = go [] (reverse s)
  where
	-- back accumulates the already-escaped tail of the url; the
	-- remaining prefix is carried in reverse.
	go back [] = parseURI back
	go back (c:cs) = case parseURI (escapeURIString isAllowedInURI (reverse (c:cs)) ++ back) of
		Just u -> Just u
		Nothing -> go (escapeURIChar escapemore c ++ back) cs

	-- Also escapes '[' and ']', which isAllowedInURI permits.
	escapemore '[' = False
	escapemore ']' = False
	escapemore c = isAllowedInURI c

-- Header names not provided by Network.HTTP.Types.
hAcceptEncoding :: CI.CI B.ByteString
hAcceptEncoding = "Accept-Encoding"

hContentDisposition :: CI.CI B.ByteString
hContentDisposition = "Content-Disposition"

hContentRange :: CI.CI B.ByteString
hContentRange = "Content-Range"

-- | Range header requesting content from the given byte offset onward.
resumeFromHeader :: FileSize -> Header
resumeFromHeader sz = (hRange, renderByteRanges [ByteRangeFrom sz])

{- Use with eg:
 -
 - > catchJust (matchStatusCodeException (== notFound404))
 -}
matchStatusCodeException :: (Status -> Bool) -> HttpException -> Maybe HttpException
matchStatusCodeException want = matchStatusCodeHeadersException want'
  where
	-- The response headers are ignored; only the status matters.
	want' s _headers = want s

-- | Matches a StatusCodeException whose status and response headers
-- satisfy the predicate.
matchStatusCodeHeadersException :: (Status -> ResponseHeaders -> Bool) -> HttpException -> Maybe HttpException
matchStatusCodeHeadersException want e = case e of
	HttpExceptionRequest _ (StatusCodeException r _)
		| want (responseStatus r) (responseHeaders r) -> Just e
	_ -> Nothing

{- Use with eg: 
 -
 - > catchJust matchHttpException
 -}
matchHttpException :: HttpException -> Maybe HttpException
matchHttpException e = Just e

-- | Matches a HttpExceptionRequest whose content satisfies the predicate.
matchHttpExceptionContent :: (HttpExceptionContent -> Bool) -> HttpException -> Maybe HttpException
matchHttpExceptionContent want e = case e of
	HttpExceptionRequest _ hec | want hec -> Just e
	_ -> Nothing

{- Constructs parameters that prevent curl from accessing any IP addresses
 - blocked by the Restriction. These are added to the input parameters,
 - which should tell curl what to do.
 -
 - This has to disable redirects because it looks up the IP addresses 
 - of the host and after limiting to those allowed by the Restriction,
 - makes curl resolve the host to those IP addresses. It doesn't make sense
 - to use this for http anyway, only for ftp or perhaps other protocols
 - supported by curl.
 -
 - Throws an exception if the Restriction blocks all addresses, or
 - if the dns lookup fails. A malformed url will also cause an exception.
 -}
curlRestrictedParams :: Restriction -> URI -> Int -> [CommandParam] -> IO [CommandParam]
curlRestrictedParams r u defport ps = case uriAuthority u of
	Nothing -> giveup "malformed url"
	Just uath -> case uriPort uath of
		-- No port in the url; use the default for the protocol.
		"" -> go (uriRegName uath) defport
		-- strict parser because the port we provide to curl
		-- needs to match the port in the url
		(':':s) -> case readMaybe s :: Maybe Int of
			Just p -> go (uriRegName uath) p
			Nothing -> giveup "malformed url"
		_ -> giveup "malformed url"
  where
	go hostname p = do
		proto <- getProtocolNumber "tcp"
		let serv = show p
		let hints = defaultHints
			{ addrFlags = [AI_ADDRCONFIG]
			, addrProtocol = proto
			, addrSocketType = Stream
			}
		addrs <- getAddrInfo (Just hints) (Just hostname) (Just serv)
		case partitionEithers (map checkrestriction addrs) of
			-- Every resolved address was blocked; throw the
			-- first blocking exception.
			((e:_es), []) -> throwIO e
			(_, as)
				| null as -> throwIO $ 
					NC.HostNotResolved hostname
				| otherwise -> return $
					(limitresolve p) as ++ ps
	checkrestriction addr = maybe (Right addr) Left $
		checkAddressRestriction r addr
	-- Makes curl resolve the host only to the allowed addresses.
	limitresolve p addrs =
		[ Param "--resolve"
		, Param $ "*:" ++ show p ++ ":" ++ intercalate ":"
			(mapMaybe (bracketaddr <$$> extractIPAddress . addrAddress) addrs)
		-- Don't let a ftp server provide an IP address.
		, Param "--ftp-skip-pasv-ip"
		-- Prevent all http redirects.
		, Param "--max-redirs", Param "0"
		]
	bracketaddr a = "[" ++ a ++ "]"

-- | Credentials for http basic authentication.
data BasicAuth = BasicAuth
	{ basicAuthUser :: String
	, basicAuthPassword :: String
	}

-- | Callback that supplies basic auth credentials for an url.
--
-- Note that this is only used when using conduit, not curl.
--
-- The returned IO action is run after trying to use the BasicAuth,
-- indicating if the password worked.
type GetBasicAuth = URLString -> IO (Maybe (BasicAuth, Bool -> IO ()))

-- | A GetBasicAuth that never supplies any credentials.
noBasicAuth :: GetBasicAuth
noBasicAuth _url = pure Nothing

-- | Adds the basic auth credentials to a Request.
applyBasicAuth' :: BasicAuth -> Request -> Request
applyBasicAuth' ba req = applyBasicAuth user pass req
  where
	user = encodeBS (basicAuthUser ba)
	pass = encodeBS (basicAuthPassword ba)

{- Make sure whatever is returned is fully evaluated. Avoids any possible
 - issues with laziness deferring processing until a time when the resource
 - has been freed. -}
extractFromResourceT :: (MonadIO m, NFData a) => a -> ResourceT m a
extractFromResourceT v = liftIO (evaluate (rnf v)) >> return v