geturlsstm.hs
-- (c) Simon Marlow 2011, see the file LICENSE for copying terms.
--
-- Sample geturls.hs (CEFP summer school notes, 2011)
--
-- Downloading multiple URLs concurrently, timing the downloads
--
-- Compile with:
-- ghc -threaded --make geturlsstm.hs

import GetURL
import TimeIt
import Control.Monad
import Control.Concurrent
import Control.Exception
import Text.Printf
import Control.Concurrent.STM
import qualified Data.ByteString as B

-----------------------------------------------------------------------------
-- Our Async API:

-- An Async holds the eventual result of an IO action in a TVar:
-- Nothing until the action completes, then Just the result.
data Async a = Async (TVar (Maybe a))

-- Fork the action in a new thread; the forked thread fills in the
-- TVar when the action finishes.
async :: IO a -> IO (Async a)
async action = do
  var <- atomically $ newTVar Nothing
  forkIO (do a <- action; atomically (writeTVar var (Just a)))
  return (Async var)

-- Block until the result is available: retry aborts the transaction
-- and re-runs it once the TVar has been written.
wait :: Async a -> IO a
wait (Async var) = atomically $ do
  m <- readTVar var
  case m of
    Nothing -> retry
    Just a  -> return a
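
-- A minimal usage sketch of the Async API above (illustrative only; the
-- names 'example', 'a' and 'b' are not part of the original program):
--
--   example :: IO (Int, Int)
--   example = do
--     a  <- async (return 1)
--     b  <- async (return 2)
--     ra <- wait a
--     rb <- wait b
--     return (ra, rb)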

-----------------------------------------------------------------------------

sites = ["http://www.google.com",
         "http://www.bing.com",
         "http://www.yahoo.com",
         "http://www.wikipedia.com/wiki/Spade",
         "http://www.wikipedia.com/wiki/Shovel"]

-- Start one download per site concurrently, then wait for all of them,
-- printing the size and elapsed time of each download.
main = mapM (async . http) sites >>= mapM wait
 where
  http url = do
    (page, time) <- timeit $ getURL url
    printf "downloaded: %s (%d bytes, %.2fs)\n" url (B.length page) time