Stub in Sync module to eventually keep the db in sync with sites
parent 04abd71582
commit d3495a9d2d
@@ -137,6 +137,7 @@ executable chan-delorean-consoomer
                       Common.Server.ConsumerSettings
                       Common.Server.JSONSettings
                       PriorityQueue
+                      Sync

     -- LANGUAGE extensions used by modules in this package.
     -- other-extensions:
@@ -1 +1 @@
-Subproject commit e67b24b5f1035c9096e729437579794a7677bd3a
+Subproject commit 0d85d44cbb5a50b4751751d5ca61707cc1b75587
src/Lib.hs | 11
@@ -683,8 +683,8 @@ processBoard settings fgs@FileGetters {..} site board = do
             ++ (Boards.pathpart board) ++ ". Error: " ++ errMsg


-processBoards :: JSONSettings -> FileGetters -> [ FilePath ] -> IO ()
-processBoards settings fgs board_names = do
+getBoards :: JSONSettings -> [ FilePath ] -> IO (Sites.Site, [ Boards.Board ])
+getBoards settings board_names = do
     site :: Sites.Site <- ensureSiteExists settings
     let boardsSet = Set.fromList board_names
     let site_id_ = Sites.site_id site
@@ -700,9 +700,14 @@ processBoards settings fgs board_names = do
     created_boards <- createArchivesForNewBoards settings boardsSet boardnames site_id_
     let boards :: [ Boards.Board ] = archived_boards ++ created_boards
     let boards_we_have_data_for = filter (\board -> Set.member (Boards.pathpart board) boardsSet) boards
-    mapM_ (processBoard settings fgs site) boards_we_have_data_for
+    return (site, boards_we_have_data_for)
+
+
+processBoards :: JSONSettings -> FileGetters -> [ FilePath ] -> IO ()
+processBoards settings fgs board_names =
+    getBoards settings board_names >>= \(site, boards) ->
+        mapM_ (processBoard settings fgs site) boards


 processBackupDirectory :: JSONSettings -> IO ()
 processBackupDirectory settings = do
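The hunk above splits the old processBoards into getBoards, which only resolves the site and its boards, and a thin processBoards wrapper that still drives the backfill. A rough sketch of how another caller, such as the new Sync module, might reuse getBoards on its own; only getBoards, JSONSettings, Sites and Boards come from the code above, the wrapper itself is hypothetical:

-- Hypothetical caller that reuses getBoards without running the backfill.
listSiteBoards :: JSONSettings -> [ FilePath ] -> IO ()
listSiteBoards settings board_names = do
    (_site, boards) <- getBoards settings board_names
    putStrLn $ "Found " ++ show (length boards) ++ " boards with local data:"
    mapM_ (putStrLn . Boards.pathpart) boards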
src/Main.hs | 16
@@ -20,6 +20,7 @@ import Lib
     )
 import qualified Network.DataClient as Client
 import qualified Common.AttachmentType as At
+import Sync

 newtype CliArgs = CliArgs
     { settingsFile :: String
@@ -105,11 +106,18 @@ processWebsite settings site_settings = do

 main :: IO ()
 main = do
-    putStrLn "Starting channel web synchronization."
     settings <- getSettings
     print settings

-    _ <- mapConcurrently (processWebsite settings) (websites settings)
+    _ <- if http_fill_all settings
+        then do
+            putStrLn "Starting web backfill"
+            mapConcurrently (processWebsite settings) (websites settings)
+        else return []
+
+    if http_sync_continously settings
+        then syncWebsites settings
+        else return ()

-    putStrLn "Done."
+    putStrLn "Done"
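The two new flags gate independent phases: http_fill_all controls the one-shot concurrent backfill, while http_sync_continously hands control to the new Sync module. As a sketch only, the same branching could be written with Control.Monad's when; the runPhases name and the ConsumerJSONSettings annotation are assumptions (the settings type is inferred from syncWebsites taking one), and void discards the mapConcurrently results so both branches are IO ():

import Control.Concurrent.Async (mapConcurrently)
import Control.Monad (void, when)

-- Same control flow as main above, expressed with `when`; processWebsite,
-- websites, http_fill_all, http_sync_continously and syncWebsites are the
-- project names referenced in the diff.
runPhases :: ConsumerJSONSettings -> IO ()
runPhases settings = do
    when (http_fill_all settings) $ do
        putStrLn "Starting web backfill"
        void $ mapConcurrently (processWebsite settings) (websites settings)

    when (http_sync_continously settings) $
        syncWebsites settings

    putStrLn "Done"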
@@ -16,10 +16,11 @@ module PriorityQueue
     , take
     , put
     , selectSkewedIndex
+    , main
     )
     where

-import Prelude hiding (splitAt, take)
+import Prelude hiding (splitAt, take, min, max, elem)
 import Data.Set hiding (take, foldr, map)
 import Data.Ord (comparing)
 import System.Random (StdGen, getStdGen, randomR)
@@ -81,7 +82,7 @@ main = do
     -- let x = fst $ take i q
     -- print (i, priority x)

-    let rs = foldr f ([], stdGen) [1..100000]
+    let rs = foldr f ([], stdGen) ([1..100000] :: [ Int ])
     mapM_ pf $ countOccurrences $ fst rs

     where
@@ -98,4 +99,3 @@ main = do
     countOccurrences :: (Eq a, Ord a) => [a] -> [(a, Int)]
     countOccurrences rolls = map (\x -> (head x, length x)) . group . sort $ rolls

-
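countOccurrences, shown above as context, is what the test main uses to summarize the 100000 skewed draws: it pairs each distinct value with how often it occurs. A small self-contained sketch of its behaviour, with the Data.List imports spelled out here (PriorityQueue.hs has its own imports and defines the helper locally under main's where):

import Data.List (group, sort)

countOccurrences :: (Eq a, Ord a) => [a] -> [(a, Int)]
countOccurrences rolls = map (\x -> (head x, length x)) . group . sort $ rolls

main :: IO ()
main = print (countOccurrences [3, 1, 3, 2, 3, 2 :: Int])
-- prints [(1,1),(2,2),(3,3)]: 1 appears once, 2 twice, 3 three times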
@@ -0,0 +1,19 @@
+module Sync where
+
+import Common.Server.ConsumerSettings
+
+syncWebsites :: ConsumerJSONSettings -> IO ()
+syncWebsites _ = do
+    putStrLn "Starting channel web synchronization."
+
+    -- first we need all the (Site, Board) tuples
+    -- perhaps we even want all (Site, Board, Thread) pairs
+    -- But then we don't load the posts of each thread, instead only do
+    -- that for threads which change,
+    --  - which means after we get all the threads
+    --  - enter a loop where you
+    --      - pick a board
+    --      - compare the threads online to memory
+    --      - load only the changed/new ones
+    --      - put board back
+
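The comment block lays out the intended algorithm: keep each board's thread list in memory, poll the board, and fetch posts only for threads that are new or changed. A very rough skeleton of that loop follows, purely as a sketch of the plan; every type and helper in it (BoardState, fetchCatalog, loadThread, the bump-marker representation) is a made-up placeholder, not an API of this codebase:

module SyncSketch where

import qualified Data.Map.Strict as Map
import Control.Monad (forM_)

type ThreadId   = Int
type LastBumped = Int   -- stand-in for whatever changes when a thread changes

data BoardState = BoardState
    { boardPath    :: String                        -- e.g. "g"
    , knownThreads :: Map.Map ThreadId LastBumped   -- what we remember from the last pass
    }

-- Placeholder for the HTTP call that fetches a board's current thread index.
fetchCatalog :: String -> IO (Map.Map ThreadId LastBumped)
fetchCatalog _ = return Map.empty

-- Placeholder for loading and persisting the posts of a single thread.
loadThread :: String -> ThreadId -> IO ()
loadThread _ _ = return ()

-- One pass over one board: compare the online catalog with memory and load
-- only the threads that are new or whose bump marker changed; "put board back"
-- here amounts to returning the refreshed state.
syncBoard :: BoardState -> IO BoardState
syncBoard st = do
    online <- fetchCatalog (boardPath st)
    let toLoad = Map.differenceWith
            (\new old -> if new /= old then Just new else Nothing)
            online (knownThreads st)
    forM_ (Map.keys toLoad) (loadThread (boardPath st))
    return st { knownThreads = online }

-- The continuous loop: keep cycling over the boards, carrying the state forward.
syncLoop :: [BoardState] -> IO ()
syncLoop boards = mapM syncBoard boards >>= syncLoop   -- a real version would throttle between passes

Keeping only a cheap per-thread marker in memory is what makes the loop inexpensive; full posts are fetched only when that marker moves, which is exactly the plan the comments describe.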