diff --git a/datafiles/static/browse.js b/datafiles/static/browse.js
new file mode 100644
index 000000000..9b4e9425a
--- /dev/null
+++ b/datafiles/static/browse.js
@@ -0,0 +1,398 @@
+const d = document;
+
+const initialParams = new URL(d.location).searchParams;
+// This parameter is named 'terms' because it is from before filters were
+// introduced. But we will parse it as a normal search string (including filters)
+const initialSearchQuery = initialParams.has('terms') ? initialParams.get('terms') : '';
+d.querySelector("#searchQuery").value = initialSearchQuery;
+
+class Model {
+ page = 0
+ numberOfResults = 0
+ column = 'default'
+ direction = 'ascending'
+ searchQuery = initialSearchQuery
+ flipDirection() {
+ if (this.direction === 'ascending') {
+ return ['ascending', this.direction = 'descending'];
+ } else {
+ return ['descending', this.direction = 'ascending'];
+ }
+ }
+}
+
+const state = new Model();
+
+addEventListener('popstate', async (evt) => {
+ if (evt.state === null) {
+ return;
+ }
+ state.page = evt.state.page;
+ state.column = evt.state.column;
+ state.direction = evt.state.direction;
+ state.searchQuery = evt.state.searchQuery;
+ d.querySelector("#searchQuery").value = evt.state.searchQuery;
+ await refresh();
+});
+
+const get = () => new Promise((resolve,reject) => {
+ const formData = new FormData();
+ const obj =
+ { page: state.page
+ , sort: {column: state.column, direction: state.direction}
+ , searchQuery: state.searchQuery
+ };
+ formData.append('browseOptions', JSON.stringify(obj));
+ fetch('/packages/search', {method:'POST', body: formData}).then(async (response) => {
+ if (!response.ok) {
+ const el = d.querySelector("#fatalError");
+ el.style.display = "block";
+ const err = await response.text();
+ el.textContent = "Error with Hackage server: " + err;
+ console.log(obj);
+ reject(new Error("fetch failed: " + err));
+ } else {
+ resolve(response.json());
+ }
+ });
+});
+
+const createName = (nameDict) => {
+ const name = d.createElement("td");
+ const nameLink = d.createElement("a");
+ nameLink.setAttribute("href", nameDict.uri);
+ nameLink.appendChild(d.createTextNode(nameDict.display));
+ name.appendChild(nameLink);
+ return name;
+}
+
+const createSimpleText = (text) => {
+ const el = d.createElement("td");
+ el.appendChild(d.createTextNode(text));
+ return el;
+}
+
+// Used with renderUser and renderTag results from backend
+const createCommaList = (arr) => {
+ const ul = d.createElement("ul");
+ ul.classList.add("commaList");
+ for (const dict of arr) {
+ const li = d.createElement("li");
+ const a = d.createElement("a");
+ a.setAttribute("href", dict.uri);
+ a.appendChild(d.createTextNode(dict.display));
+ li.appendChild(a);
+ ul.appendChild(li);
+ }
+ return ul;
+}
+
+const createTags = (tagsArr) => {
+ const el = d.createElement("td");
+ if (tagsArr.length === 0) {
+ return el;
+ }
+ el.appendChild(d.createTextNode("("));
+ const ul = createCommaList(tagsArr);
+ el.appendChild(ul);
+ el.appendChild(d.createTextNode(")"));
+ return el;
+};
+
+const createLastUpload = (lastUploadISO8601) => {
+ const el = d.createElement("td");
+ const date = lastUploadISO8601.substr(0, "0000-00-00".length);
+ el.setAttribute("title", new Date(lastUploadISO8601).toLocaleString());
+ el.classList.add("lastUpload");
+ el.appendChild(d.createTextNode(date));
+ return el;
+};
+
+const createMaintainers = (maintainersArr) => {
+ const el = d.createElement("td");
+ if (maintainersArr.length === 0) {
+ return el;
+ }
+ const ul = createCommaList(maintainersArr);
+ el.appendChild(ul);
+ return el;
+};
+
+const replaceRows = (response) => {
+ const l = d.querySelector("#listing");
+ l.replaceChildren();
+ for (const row of response.pageContents) {
+ const tr = d.createElement("tr");
+ tr.appendChild(createName(row.name));
+ tr.appendChild(createSimpleText(row.downloads));
+ tr.appendChild(createSimpleText(row.votes));
+ tr.appendChild(createSimpleText(row.description));
+ tr.appendChild(createTags(row.tags));
+ tr.appendChild(createLastUpload(row.lastUpload));
+ tr.appendChild(createMaintainers(row.maintainers));
+ l.appendChild(tr);
+ }
+};
+
+const removeSortIndicator = () => {
+ // No column is actually visible for the default sort mode,
+ // so there is nothing to do in that case.
+ if (state.column !== 'default') {
+ const columnHeader = d.querySelector("#arrow-" + state.column);
+ columnHeader.removeAttribute("aria-sort");
+ const oldClasses = columnHeader.classList;
+ oldClasses.remove('ascending');
+ oldClasses.remove('descending');
+ }
+}
+
+export const sort = async (column) => {
+ if (state.column === column) {
+ const [oldCls, newCls] = state.flipDirection();
+ const columnHeader = d.querySelector("#arrow-" + column);
+ const classes = columnHeader.classList;
+ classes.toggle(oldCls);
+ classes.toggle(newCls);
+ columnHeader.setAttribute("aria-sort", newCls);
+ } else {
+ removeSortIndicator();
+
+ state.direction = 'ascending';
+ state.column = column;
+
+ // Add sort indicator on new column
+ const columnHeader = d.querySelector("#arrow-" + column);
+ columnHeader.classList.add("ascending");
+ columnHeader.setAttribute("aria-sort", "ascending");
+ }
+ state.page = 0;
+ await refresh();
+};
+
+const pageSize = 50; // make sure it is kept in sync with backend
+
+const pageAvailable = (page) => {
+ if (page < 0) return false;
+ if (page === 0) return true;
+ return page * pageSize < state.numberOfResults;
+}
+
+const changePage = async (candidate) => {
+ if (!pageAvailable(candidate)) {
+ return;
+ }
+ state.page = candidate;
+ history.pushState(state, d.title);
+ await refresh();
+ scrollTo(0, d.body.scrollHeight);
+};
+
+const createIndexIndicator = () => {
+ const el = d.createElement("div");
+ const minIdx = state.page * pageSize + 1;
+ let maxIdx = (state.page + 1) * pageSize;
+ maxIdx = Math.min(maxIdx, state.numberOfResults);
+ let fullMsg;
+ if (state.numberOfResults === 0) {
+ fullMsg = "No results found.";
+ } else {
+ const entriesText = state.numberOfResults === 1 ? "entry" : "entries";
+ fullMsg = `Showing ${minIdx} to ${maxIdx} of ${state.numberOfResults} ${entriesText}`;
+ }
+ el.appendChild(d.createTextNode(fullMsg));
+ return el;
+};
+
+const refresh = async () => {
+ const res = await get();
+ state.numberOfResults = res.numberOfResults;
+ replaceRows(res);
+ const container = d.querySelector("#paginatorContainer");
+ container.replaceChildren();
+ container.appendChild(createIndexIndicator());
+ container.appendChild(createPaginator());
+ if (state.searchQuery.trim() === "") {
+ d.querySelector("#browseFooter").style.display = "none";
+ } else {
+ d.querySelector("#browseFooter").style.display = "block";
+ const url = new URL(hoogleNoParam);
+ url.searchParams.set("hoogle", state.searchQuery);
+ d.querySelector("#hoogleLink").setAttribute("href", url);
+ }
+};
+
+export const submitSearch = async (evt) => {
+ if (evt) evt.preventDefault();
+ state.searchQuery = d.querySelector("#searchQuery").value;
+ removeSortIndicator();
+ state.column = 'default';
+ state.direction = 'ascending';
+ state.page = 0;
+
+ const url = new URL(d.location);
+ url.searchParams.set('terms', state.searchQuery);
+ history.pushState(state, d.title, url);
+
+ await refresh();
+};
+
+const createPageLink = (num) => {
+ const a = d.createElement("a");
+ if (state.page === num) a.classList.add("current");
+ a.setAttribute("href", "#");
+ a.addEventListener('click', (evt) => {
+ evt.preventDefault();
+ changePage(num);
+ });
+ a.appendChild(d.createTextNode(num + 1));
+ return a;
+};
+
+const createPrevNext = (prevNextNum, cond, txt) => {
+ const el = d.createElement(cond ? "span" : "a");
+ el.setAttribute("href", "#");
+ el.addEventListener('click', (evt) => {
+ evt.preventDefault();
+ changePage(prevNextNum);
+ });
+ if (cond) el.classList.add("disabled");
+ el.appendChild(d.createTextNode(txt));
+ return el;
+};
+
+const createEllipsis = () => {
+ const el = d.createElement("span");
+ el.innerHTML = "…";
+ return el;
+};
+
+const createPaginator = () => {
+ const maxPage = maxAvailablePage(state.numberOfResults);
+
+ const pag = d.createElement("div");
+ pag.classList.add("paginator");
+ pag.appendChild(createPrevNext(state.page - 1, state.page === 0, "Previous"));
+ // note that page is zero-indexed
+ if (maxPage <= 4) {
+ // No ellipsis
+ for (let i = 0; i <= maxPage; i++) {
+ pag.appendChild(createPageLink(i));
+ }
+ } else if (state.page <= 3) {
+ // One ellipsis, at the end
+ for (let i = 0; i <= 4; i++) {
+ pag.appendChild(createPageLink(i));
+ }
+ pag.appendChild(createEllipsis());
+ pag.appendChild(createPageLink(maxPage));
+ } else if (state.page + 3 >= maxPage) {
+ // One ellipsis, at the start
+ pag.appendChild(createPageLink(0));
+ pag.appendChild(createEllipsis());
+ for (let i = maxPage - 4; i <= maxPage; i++) {
+ pag.appendChild(createPageLink(i));
+ }
+ } else {
+ // Two ellipses, at both ends
+ pag.appendChild(createPageLink(0));
+ pag.appendChild(createEllipsis());
+ for (let i = state.page - 1; i <= state.page + 1; i++) {
+ pag.appendChild(createPageLink(i));
+ }
+ pag.appendChild(createEllipsis());
+ pag.appendChild(createPageLink(maxPage));
+ }
+ const isNowOnLastPage = state.page === maxPage;
+ pag.appendChild(createPrevNext(state.page + 1, isNowOnLastPage, "Next"));
+
+ return pag;
+};
+
+const maxAvailablePage = (numberOfResults) => {
+ if (numberOfResults === 0) numberOfResults++;
+ return Math.floor((numberOfResults - 1) / pageSize);
+};
+
+const hoogleNoParam = "https://hoogle.haskell.org";
+
+let expanded = false;
+
+export const toggleAdvanced = () => {
+ if (expanded) {
+ d.querySelector("#toggleAdvanced").setAttribute("aria-expanded", "false");
+ d.querySelector("#chevron").innerHTML = "▸";
+ d.querySelector("#advancedForm").style.display = "none";
+ } else {
+ d.querySelector("#toggleAdvanced").setAttribute("aria-expanded", "true");
+ d.querySelector("#chevron").innerHTML = "▾";
+ d.querySelector("#advancedForm").style.display = "block";
+ }
+ expanded = !expanded;
+};
+
+export const appendDeprecated = async (evt) => {
+ if (evt) evt.preventDefault();
+ d.querySelector("#searchQuery").value += " (deprecated:any)";
+ await submitSearch();
+};
+
+const isNonNegativeFloatString = (n) => {
+ // If there is a decimal separator, digits before it are required.
+ // A match guarantees a non-negative number; avoids parseFloat on a match array.
+ return /^\d+(\.\d+)?$/.test(n);
+};
+
+export const validateAgeOfLastUL = () => {
+ const el = d.querySelector("#advAgeLastUL");
+ const duration = el.value.trim();
+ if (duration === ""
+ || !(["d", "w", "m", "y"].includes(duration.substr(-1, 1)))
+ || !isNonNegativeFloatString(duration.substr(0, duration.length - 1))) {
+ el.setCustomValidity("Must be positive and end in d(ay), w(eek), m(onth) or y(ear)");
+ return false;
+ }
+ el.setCustomValidity("");
+ return duration;
+};
+
+export const appendAgeOfLastUL = async (evt) => {
+ if (evt) evt.preventDefault();
+ const maybeDuration = validateAgeOfLastUL();
+ if (maybeDuration === false) {
+ return;
+ }
+ const duration = maybeDuration;
+ d.querySelector("#searchQuery").value += ` (ageOfLastUpload < ${duration})`;
+ await submitSearch();
+};
+
+export const validateTag = () => {
+ const el = d.querySelector("#advTag");
+ const tag = el.value.trim();
+ if (tag === "" || !(/^[a-z0-9]+$/i.test(tag))) {
+ el.setCustomValidity("Tag cannot be empty and must be alphanumeric and ASCII");
+ return false;
+ }
+ el.setCustomValidity("");
+ return tag;
+}
+
+export const appendTag = async (evt) => {
+ if (evt) evt.preventDefault();
+ const maybeTag = validateTag();
+ if (maybeTag === false) {
+ return;
+ }
+ const tag = maybeTag;
+ d.querySelector("#searchQuery").value += ` (tag:${tag})`;
+ await submitSearch();
+};
+
+export const appendRating = async (evt) => {
+ if (evt) evt.preventDefault();
+ const rating = d.querySelector("#advRatingSlider").value;
+ d.querySelector("#searchQuery").value += ` (rating >= ${rating})`;
+ await submitSearch();
+};
+
+await refresh();
diff --git a/datafiles/templates/Html/browse.html.st b/datafiles/templates/Html/browse.html.st
new file mode 100644
index 000000000..a9df4d7f0
--- /dev/null
+++ b/datafiles/templates/Html/browse.html.st
@@ -0,0 +1,263 @@
+
+
+
+ $hackageCssTheme()$
+
+ $heading$ | Hackage
+
+
+
+ $hackagePageHeader()$
+
+
$heading$
+ $content$
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/hackage-server.cabal b/hackage-server.cabal
index 0b72ac5db..dbad0b2fb 100644
--- a/hackage-server.cabal
+++ b/hackage-server.cabal
@@ -253,6 +253,10 @@ library lib-server
Distribution.Server.Util.Markdown
Distribution.Server.Features
+ Distribution.Server.Features.Browse
+ Distribution.Server.Features.Browse.ApplyFilter
+ Distribution.Server.Features.Browse.Options
+ Distribution.Server.Features.Browse.Parsers
Distribution.Server.Features.Core
Distribution.Server.Features.Core.State
Distribution.Server.Features.Core.Backup
@@ -362,6 +366,7 @@ library lib-server
, async ^>= 2.2.1
-- requires bumping http-io-streams
, attoparsec ^>= 0.14.4
+ , attoparsec-iso8601 ^>= 1.0
, base16-bytestring ^>= 1.0
-- requires bumping http-io-streams
, base64-bytestring ^>= 1.2.1.0
@@ -513,6 +518,21 @@ test-suite HighLevelTest
, io-streams ^>= 1.5.0.1
, http-io-streams ^>= 0.1.6.1
+test-suite PaginationTest
+ import: test-defaults
+ type: exitcode-stdio-1.0
+ main-is: PaginationTest.hs
+ build-tool-depends: hackage-server:hackage-server
+ other-modules: Util
+
+test-suite BrowseQueryParserTest
+ import: test-defaults
+ type: exitcode-stdio-1.0
+ main-is: BrowseQueryParserTest.hs
+ build-tool-depends: hackage-server:hackage-server
+ other-modules: Util
+ build-depends: attoparsec
+
test-suite CreateUserTest
import: test-defaults
diff --git a/src/Distribution/Server/Features.hs b/src/Distribution/Server/Features.hs
index 76d9c5145..71c23c4f6 100644
--- a/src/Distribution/Server/Features.hs
+++ b/src/Distribution/Server/Features.hs
@@ -21,6 +21,7 @@ import Distribution.Server.Features.Upload (initUploadFeature)
import Distribution.Server.Features.Mirror (initMirrorFeature)
#ifndef MINIMAL
+import Distribution.Server.Features.Browse (initBrowseFeature)
import Distribution.Server.Features.TarIndexCache (initTarIndexCacheFeature)
import Distribution.Server.Features.Html (initHtmlFeature)
import Distribution.Server.Features.PackageCandidates (initPackageCandidatesFeature, candidatesCoreResource, queryGetCandidateIndex)
@@ -152,6 +153,8 @@ initHackageFeatures env@ServerEnv{serverVerbosity = verbosity} = do
initSitemapFeature env
mkPackageFeedFeature <- logStartup "package feed" $
initPackageFeedFeature env
+ mkBrowseFeature <- logStartup "browse" $
+ initBrowseFeature env
mkPackageJSONFeature <- logStartup "package info JSON" $
initPackageInfoJSONFeature env
#endif
@@ -327,6 +330,14 @@ initHackageFeatures env@ServerEnv{serverVerbosity = verbosity} = do
usersFeature
tarIndexCacheFeature
+ browseFeature <- mkBrowseFeature
+ coreFeature
+ usersFeature
+ tagsFeature
+ listFeature
+ searchFeature
+ distroFeature
+
packageInfoJSONFeature <- mkPackageJSONFeature
coreFeature
versionsFeature
@@ -378,6 +389,7 @@ initHackageFeatures env@ServerEnv{serverVerbosity = verbosity} = do
#ifdef DEBUG
, serverCrashFeature
#endif
+ , browseFeature
]
-- Run all post init hooks, now that everyone's gotten a chance to register
@@ -396,6 +408,7 @@ initHackageFeatures env@ServerEnv{serverVerbosity = verbosity} = do
loginfo verbosity ("Initialising " ++ feature ++ " feature")
logTiming verbosity ("Initialising " ++ feature ++ " feature done") action
+
-- | Checkpoint a feature's persistent state to disk.
featureCheckpoint :: HackageFeature -> IO ()
featureCheckpoint = mapM_ abstractStateCheckpoint . featureState
diff --git a/src/Distribution/Server/Features/Browse.hs b/src/Distribution/Server/Features/Browse.hs
new file mode 100644
index 000000000..60095c1f6
--- /dev/null
+++ b/src/Distribution/Server/Features/Browse.hs
@@ -0,0 +1,129 @@
+{-# LANGUAGE BlockArguments, NamedFieldPuns, TupleSections #-}
+module Distribution.Server.Features.Browse (initBrowseFeature, PaginationConfig(..), StartIndex(..), NumElems(..), paginate) where
+
+import Control.Arrow (left)
+import Control.Monad.Except (ExceptT, liftIO, throwError)
+import Control.Monad.Trans.Class (lift)
+import Control.Monad.Trans.Except (except)
+import Data.ByteString.Lazy (ByteString)
+import Data.Time (getCurrentTime)
+
+import Data.Aeson (Value(Array), eitherDecode, object, toJSON, (.=))
+import qualified Data.Aeson.Key as Key
+import qualified Data.Vector as V
+
+import Distribution.Server.Features.Browse.ApplyFilter (applyFilter)
+import Distribution.Server.Features.Browse.Options (BrowseOptions(..), IsSearch(..))
+import Distribution.Server.Features.Core (CoreFeature(CoreFeature), queryGetPackageIndex, coreResource)
+import Distribution.Server.Features.Distro (DistroFeature)
+import Distribution.Server.Features.PackageList (ListFeature(ListFeature), makeItemList)
+import Distribution.Server.Features.Search (SearchFeature(SearchFeature), searchPackages)
+import Distribution.Server.Features.Tags (TagsFeature(TagsFeature), tagsResource)
+import Distribution.Server.Features.Users (UserFeature(UserFeature), userResource)
+import Distribution.Server.Framework.Error (ErrorResponse(ErrorResponse))
+import Distribution.Server.Framework.Feature (HackageFeature(..), emptyHackageFeature)
+import Distribution.Server.Framework.Resource (Resource(..), resourceAt)
+import Distribution.Server.Framework.ServerEnv (ServerEnv(..))
+import qualified Distribution.Server.Pages.Index as Pages
+
+import Happstack.Server.Monads (ServerPartT)
+import Happstack.Server.Response (ToMessage(toResponse))
+import Happstack.Server.RqData (lookBS)
+import Happstack.Server.Types (Method(POST), Response)
+
+type BrowseFeature =
+ CoreFeature
+ -> UserFeature
+ -> TagsFeature
+ -> ListFeature
+ -> SearchFeature
+ -> DistroFeature
+ -> IO HackageFeature
+
+initBrowseFeature :: ServerEnv -> IO BrowseFeature
+initBrowseFeature _env =
+ pure \coreFeature userFeature tagsFeature listFeature searchFeature distroFeature ->
+ pure $
+ (emptyHackageFeature "json")
+ { featureResources =
+ [ (resourceAt "/packages/search")
+ { resourceDesc =
+ [ (POST, "Browse and search using a BrowseOptions structure in multipart/form-data encoding")
+ ]
+ , resourcePost =
+ [ ("json"
+ , \_ -> getNewPkgList coreFeature userFeature tagsFeature listFeature searchFeature distroFeature
+ )
+ ]
+ }
+ ]
+ , featureState = []
+ }
+
+data PaginationConfig = PaginationConfig
+ { totalNumberOfElements :: Int
+ , pageNumber :: Int
+ }
+ deriving Show
+
+newtype NumElems = NumElems Int
+ deriving (Eq, Show)
+newtype StartIndex = StartIndex Int
+ deriving (Eq, Show)
+
+paginate :: PaginationConfig -> Maybe (StartIndex, NumElems)
+paginate PaginationConfig{totalNumberOfElements, pageNumber} = do
+ let pageSize = 50 -- make sure it is kept in sync with frontend
+ startIndex <-
+ if totalNumberOfElements <= pageNumber * pageSize
+ then
+ -- We don't want to claim that the page 0 is ever out of bounds,
+ -- since it is normal to request page 0 of a listing with 0 results.
+ if pageNumber == 0
+ then Just 0
+ else Nothing
+ else Just $ pageNumber * pageSize
+ Just
+ ( StartIndex startIndex
+ , NumElems $
+ if totalNumberOfElements < startIndex + pageSize
+ then totalNumberOfElements - startIndex
+ else pageSize
+ )
+
+getNewPkgList :: CoreFeature -> UserFeature -> TagsFeature -> ListFeature -> SearchFeature -> DistroFeature -> ServerPartT (ExceptT ErrorResponse IO) Response
+getNewPkgList CoreFeature{queryGetPackageIndex, coreResource} UserFeature{userResource} TagsFeature{tagsResource} ListFeature{makeItemList} SearchFeature{searchPackages} distroFeature = do
+ browseOptionsBS <- lookBS "browseOptions"
+ browseOptions <- lift (parseBrowseOptions browseOptionsBS)
+ (isSearch, packageNames) <-
+ case boSearchTerms browseOptions of
+ [] -> (IsNotSearch,) <$> Pages.toPackageNames <$> queryGetPackageIndex
+ terms -> (IsSearch,) <$> liftIO (searchPackages terms)
+ pkgDetails <- liftIO (makeItemList packageNames)
+ now <- liftIO getCurrentTime
+ listOfPkgs <- liftIO $ applyFilter now isSearch coreResource userResource tagsResource distroFeature browseOptions pkgDetails
+ let config =
+ PaginationConfig
+ { totalNumberOfElements = length listOfPkgs
+ , pageNumber = fromIntegral $ boPage browseOptions
+ }
+ (StartIndex startIndex, NumElems numElems) <-
+ lift $ maybe
+ (throwError . badRespFromString $ "Invalid page number: " ++ show config)
+ pure
+ (paginate config)
+ let pageContents = V.slice startIndex numElems (V.fromList listOfPkgs)
+ pure . toResponse $
+ object
+ [ Key.fromString "pageContents" .= Array pageContents
+ , Key.fromString "numberOfResults" .= toJSON (length listOfPkgs)
+ ]
+
+parseBrowseOptions :: ByteString -> ExceptT ErrorResponse IO BrowseOptions
+parseBrowseOptions browseOptionsBS = except eiDecoded
+ where
+ eiDecoded :: Either ErrorResponse BrowseOptions
+ eiDecoded = left badRespFromString (eitherDecode browseOptionsBS)
+
+badRespFromString :: String -> ErrorResponse
+badRespFromString err = ErrorResponse 400 [] err []
diff --git a/src/Distribution/Server/Features/Browse/ApplyFilter.hs b/src/Distribution/Server/Features/Browse/ApplyFilter.hs
new file mode 100644
index 000000000..3f2b9421d
--- /dev/null
+++ b/src/Distribution/Server/Features/Browse/ApplyFilter.hs
@@ -0,0 +1,113 @@
+{-# LANGUAGE RecordWildCards, NamedFieldPuns, TypeApplications #-}
+module Distribution.Server.Features.Browse.ApplyFilter (applyFilter) where
+
+import Control.Monad (filterM)
+import Data.List (sortBy)
+import Data.Ord (comparing)
+import Data.Time.Clock (UTCTime(utctDay), diffUTCTime)
+import Data.Time.Format.ISO8601 (iso8601Show)
+
+import Data.Aeson (Value, (.=), object)
+import qualified Data.Aeson.Key as Key
+
+import qualified Data.Set as S
+
+import Distribution.Server.Features.Browse.Options (BrowseOptions(..), Direction(..), Column(..), Sort(..), NormalColumn(..), IsSearch(..))
+import Distribution.Server.Features.Browse.Parsers (DeprecatedOption(..), Filter(..), operatorToFunction)
+import Distribution.Server.Features.Core (CoreResource, corePackageNameUri)
+import Distribution.Server.Features.Distro (DistroFeature(DistroFeature, queryPackageStatus))
+import Distribution.Server.Features.Distro.Types (DistroName(..))
+import Distribution.Server.Features.PackageList(PackageItem(..))
+import Distribution.Server.Features.Tags (Tag(..), TagsResource, tagUri)
+import Distribution.Server.Features.Users (UserResource, userPageUri)
+import Distribution.Server.Users.Types (UserName)
+import Distribution.Text (display)
+
+applyFilter :: UTCTime -> IsSearch -> CoreResource -> UserResource -> TagsResource -> DistroFeature -> BrowseOptions -> [PackageItem] -> IO [Value]
+applyFilter now isSearch coreResource userResource tagsResource DistroFeature{queryPackageStatus} browseOptions items = do
+ packages <- filterM filterForItem items
+ pure $
+ map packageIndexInfoToValue $
+ sort isSearch (boSort browseOptions) $
+ packages
+ where
+ packageIndexInfoToValue :: PackageItem -> Value
+ packageIndexInfoToValue PackageItem{..} =
+ object
+ [ Key.fromString "name" .= renderPackage itemName
+ , Key.fromString "downloads" .= itemDownloads
+ , Key.fromString "votes" .= itemVotes
+ , Key.fromString "description" .= itemDesc
+ , Key.fromString "tags" .= map renderTag (S.toAscList itemTags)
+ , Key.fromString "lastUpload" .= iso8601Show itemLastUpload
+ , Key.fromString "maintainers" .= map renderUser itemMaintainer
+ ]
+ renderTag :: Tag -> Value
+ renderTag tag =
+ object
+ [ Key.fromString "uri" .= tagUri tagsResource "" tag
+ , Key.fromString "display" .= display tag
+ ]
+ renderUser :: UserName -> Value
+ renderUser user =
+ object
+ [ Key.fromString "uri" .= userPageUri userResource "" user
+ , Key.fromString "display" .= display user
+ ]
+ renderPackage pkg =
+ object
+ [ Key.fromString "uri" .= corePackageNameUri coreResource "" pkg
+ , Key.fromString "display" .= display pkg
+ ]
+
+ includeItem :: PackageItem -> Filter -> IO Bool
+ includeItem PackageItem{ itemDownloads } (DownloadsFilter ( op, sndParam)) = pure $ operatorToFunction op (fromIntegral @Int @Word itemDownloads) sndParam
+ includeItem PackageItem{ itemVotes } (RatingFilter (op, sndParam) ) = pure $ operatorToFunction op itemVotes sndParam
+ includeItem PackageItem{ itemLastUpload } (LastUploadFilter (op, sndParam)) = pure $ operatorToFunction op (utctDay itemLastUpload) sndParam
+ includeItem PackageItem{ itemTags } (TagFilter tagStr) = pure $ any (\tag -> display tag == tagStr) itemTags
+ includeItem PackageItem{ itemMaintainer } (MaintainerFilter maintainerStr) = pure $ any (\user -> display user == maintainerStr) itemMaintainer
+ includeItem PackageItem{ itemLastUpload } (AgeLastULFilter (op, sndParam)) = pure $ operatorToFunction op (diffUTCTime now itemLastUpload) sndParam
+ includeItem PackageItem{ itemDeprecated } (DeprecatedFilter OnlyDeprecated) = pure $ not (null itemDeprecated)
+ includeItem PackageItem{ itemDeprecated } (DeprecatedFilter ExcludeDeprecated) = pure $ null itemDeprecated
+ includeItem _ (DeprecatedFilter Don'tCareAboutDeprecated) = pure True
+ includeItem PackageItem{ itemName } (DistroFilter distroStr) = elem (DistroName distroStr) . map fst <$> queryPackageStatus itemName
+ includeItem packageItem (Not filt) = not <$> includeItem packageItem filt
+
+ filtersWithoutDefaults = boFilters browseOptions
+
+ filtersWithDefaults =
+ -- The lack of other filters means we don't care.
+ -- But deprecated packages are excluded by default.
+ -- So we check if the user has overriden the default filter.
+ case [ x | DeprecatedFilter x <- filtersWithoutDefaults ] of
+ [] -> DeprecatedFilter ExcludeDeprecated : filtersWithoutDefaults
+ _ -> filtersWithoutDefaults
+
+ filterForItem :: PackageItem -> IO Bool
+ filterForItem item =
+ all id <$> traverse (includeItem item) filtersWithDefaults
+
+sort :: IsSearch -> Sort -> [PackageItem] -> [PackageItem]
+sort isSearch Sort {sortColumn, sortDirection} =
+ case sortColumn of
+ DefaultColumn ->
+ case isSearch of
+ IsSearch -> id
+ IsNotSearch -> id
+ NormalColumn normalColumn ->
+ let
+ comparer =
+ case normalColumn of
+ Name -> comparing itemName
+ Downloads -> comparing itemDownloads
+ Rating -> comparing itemVotes
+ Description -> comparing itemDesc
+ Tags -> comparing (S.toAscList . itemTags)
+ LastUpload -> comparing itemLastUpload
+ Maintainers -> comparing itemMaintainer
+ in sortBy (maybeReverse comparer)
+ where
+ maybeReverse =
+ case sortDirection of
+ Ascending -> id
+ Descending -> flip
diff --git a/src/Distribution/Server/Features/Browse/Options.hs b/src/Distribution/Server/Features/Browse/Options.hs
new file mode 100644
index 000000000..c894104b0
--- /dev/null
+++ b/src/Distribution/Server/Features/Browse/Options.hs
@@ -0,0 +1,78 @@
+{-# LANGUAGE BlockArguments, LambdaCase, OverloadedStrings #-}
+module Distribution.Server.Features.Browse.Options (BrowseOptions(..), Column(..), Direction(..), IsSearch(..), NormalColumn(..), Sort(..)) where
+
+import Data.Aeson ((.:), FromJSON(parseJSON), withObject, withText)
+import Data.Attoparsec.Text (parseOnly)
+import qualified Data.Text as T
+
+import Distribution.Server.Features.Browse.Parsers (Filter, conditions, condsToFiltersAndTerms)
+
+data IsSearch = IsSearch | IsNotSearch
+
+data NormalColumn = Name | Downloads | Rating | Description | Tags | LastUpload | Maintainers
+ deriving (Show, Eq)
+
+data Column = DefaultColumn | NormalColumn NormalColumn
+ deriving (Show, Eq)
+
+data Direction = Ascending | Descending
+ deriving (Show, Eq)
+
+data Sort = Sort
+ { sortColumn :: Column
+ , sortDirection :: Direction
+ }
+ deriving (Show, Eq)
+
+data BrowseOptions = BrowseOptions
+ { boPage :: Word
+ , boSort :: Sort
+ , boFilters :: [Filter]
+ , boSearchTerms :: [String]
+ }
+
+instance FromJSON Column where
+ parseJSON =
+ withText "Column"
+ \case
+ "default" -> pure DefaultColumn
+ "name" -> pure $ NormalColumn Name
+ "downloads" -> pure $ NormalColumn Downloads
+ "rating" -> pure $ NormalColumn Rating
+ "description" -> pure $ NormalColumn Description
+ "tags" -> pure $ NormalColumn Tags
+ "lastUpload" -> pure $ NormalColumn LastUpload
+ "maintainers" -> pure $ NormalColumn Maintainers
+ t -> fail $ "Column invalid: " ++ T.unpack t
+
+instance FromJSON Direction where
+ parseJSON =
+ withText "Direction"
+ \case
+ "ascending" -> pure Ascending
+ "descending" -> pure Descending
+ t -> fail $ "Direction invalid: " ++ T.unpack t
+
+instance FromJSON Sort where
+ parseJSON = withObject "Sort" \o ->
+ Sort
+ <$> o .: "column"
+ <*> o .: "direction"
+
+parse :: MonadFail m => T.Text -> m ([Filter], [String])
+parse searchQuery = do
+ -- Search query parsing should never fail
+ Right conds <- pure (parseOnly conditions searchQuery)
+ pure (condsToFiltersAndTerms conds)
+
+instance FromJSON BrowseOptions where
+ parseJSON = withObject "BrowseOptions" \o -> do
+ (page, sort, searchQuery) <-
+ (,,)
+ <$> o .: "page"
+ <*> o .: "sort"
+ <*> o .: "searchQuery"
+ -- The use of monad here won't make us suffer from
+ -- sequentiality since the parse should never fail.
+ (filters, terms) <- parse searchQuery
+ pure (BrowseOptions page sort filters terms)
diff --git a/src/Distribution/Server/Features/Browse/Parsers.hs b/src/Distribution/Server/Features/Browse/Parsers.hs
new file mode 100644
index 000000000..6445bbc1c
--- /dev/null
+++ b/src/Distribution/Server/Features/Browse/Parsers.hs
@@ -0,0 +1,163 @@
+{-# LANGUAGE BlockArguments, OverloadedStrings, TupleSections #-}
+module Distribution.Server.Features.Browse.Parsers
+ ( Condition(..)
+ , DeprecatedOption(..)
+ , Filter(..)
+ , Operator(..)
+ , conditions
+ , condsToFiltersAndTerms
+ , filterOrSearchTerms
+ , operatorToFunction
+ , searchTerms
+ ) where
+
+import Prelude hiding (Ordering(..), filter)
+import Control.Applicative ((<|>))
+import Control.Monad (guard, join)
+import Data.Foldable (asum)
+import Data.Time (Day, NominalDiffTime, nominalDay)
+import GHC.Float (double2Float)
+
+import Data.Attoparsec.Text
+import Data.Attoparsec.Time (day)
+import Data.Text (Text)
+
+data DeprecatedOption
+ = OnlyDeprecated
+ | ExcludeDeprecated
+ | Don'tCareAboutDeprecated
+ deriving (Show, Eq)
+
+data Filter
+ = DownloadsFilter (Operator, Word)
+ | RatingFilter (Operator, Float)
+ | LastUploadFilter (Operator, Day)
+ | AgeLastULFilter (Operator, NominalDiffTime)
+ | TagFilter String
+ | MaintainerFilter String
+ | DeprecatedFilter DeprecatedOption
+ | DistroFilter String
+ | Not Filter
+ deriving (Show, Eq)
+
+data Operator = LT | LTE | GT | GTE | EQ | NEQ
+ deriving (Show, Eq)
+
+deprecatedOption :: Parser DeprecatedOption
+deprecatedOption =
+ asum
+ [ "any" *> pure Don'tCareAboutDeprecated
+ , ("false" <|> "no") *> pure ExcludeDeprecated
+ , ("true" <|> "yes") *> pure OnlyDeprecated
+ ]
+
+
+operatorToFunction :: Ord a => Operator -> a -> (a -> Bool)
+operatorToFunction LT a = (a <)
+operatorToFunction LTE a = (a <=)
+operatorToFunction GT a = (a >)
+operatorToFunction GTE a = (a >=)
+operatorToFunction EQ a = (a ==)
+operatorToFunction NEQ a = (a /=)
+
+data Condition = FilterCond Filter | SearchTermCond String
+ deriving (Show, Eq)
+
+condsToFiltersAndTerms :: [Condition] -> ([Filter], [String])
+condsToFiltersAndTerms conds =
+ ([x | FilterCond x <- conds], [x | SearchTermCond x <- conds])
+
+opAndSndParam :: Ord a => Parser a -> Parser (Operator, a)
+opAndSndParam parser = do
+ let mkParser op = skipSpace *> fmap (op,) parser
+ lt = "<" *> mkParser LT
+ gt = ">" *> mkParser GT
+ gte = ">=" *> mkParser GTE
+ lte = "<=" *> mkParser LTE
+ eq = "=" *> mkParser EQ
+ longEq = "==" *> mkParser EQ
+ neq = "/=" *> mkParser NEQ
+ cStyleNeq = "!=" *> mkParser NEQ
+ in asum [lt, gt, gte, lte, eq, longEq, neq, cStyleNeq]
+
+allowedAfterOpeningBrace :: AllowNot -> Parser Text
+allowedAfterOpeningBrace AllowNot = "not " <|> allowedAfterOpeningBrace DisallowNot
+allowedAfterOpeningBrace _ =
+ asum
+ [ "downloads", "rating", "lastUpload" , "ageOfLastUpload"
+ , "tag:", "maintainer:", "deprecated:", "distro:"
+ ]
+
+-- Whether the 'not' operator can be used.
+-- (used to prevent recursive parsing)
+data AllowNot = AllowNot | DisallowNot
+
+filterWith :: AllowNot -> Parser Filter
+filterWith allowNot = do
+ fieldName <- allowedAfterOpeningBrace allowNot
+ if fieldName == "not "
+ then Not <$> filterWith DisallowNot
+ else do
+ skipSpace
+ let nonNegativeFloat :: Parser Float
+ nonNegativeFloat = do
+ float <- double2Float <$> double
+ guard $ float >= 0
+ pure float
+ filt <- case fieldName of
+ "downloads" -> DownloadsFilter <$> opAndSndParam decimal
+ "rating" -> RatingFilter <$> opAndSndParam nonNegativeFloat
+ "lastUpload" -> LastUploadFilter <$> opAndSndParam day
+ "ageOfLastUpload" -> AgeLastULFilter <$> opAndSndParam nominalDiffTime
+ "tag:" -> TagFilter <$> wordWoSpaceOrParens
+ "maintainer:" -> MaintainerFilter <$> wordWoSpaceOrParens
+ "deprecated:" -> DeprecatedFilter <$> deprecatedOption
+ "distro:" -> DistroFilter <$> wordWoSpaceOrParens
+ _ -> fail "Impossible since fieldName possibilities are known at compile time"
+ pure filt
+
+filter :: Parser [Condition]
+filter = do
+ filt <- filterWith AllowNot
+ pure [FilterCond filt]
+
+filterOrSearchTerms :: Parser [Condition]
+filterOrSearchTerms =
+ asum
+ [ do
+ _ <- "("
+ skipSpace
+ filt <- filter <|> searchTerms <|> pure []
+ skipSpace
+ _ <- ")"
+ pure filt
+ , searchTerms
+ ]
+
+searchTerms :: Parser [Condition]
+searchTerms = sepBy1 searchTerm (many1 space)
+
+-- The search engine accepts terms with spaces or parenthesis in them also but
+-- we do not allow that, just to keep this parser simple.
+searchTerm :: Parser Condition
+searchTerm = fmap SearchTermCond wordWoSpaceOrParens
+
+wordWoSpaceOrParens :: Parser String
+wordWoSpaceOrParens = many1 . satisfy $ notInClass " ()"
+
+conditions :: Parser [Condition]
+conditions = fmap join . many' $ skipSpace *> filterOrSearchTerms
+
+nominalDiffTime :: Parser NominalDiffTime
+nominalDiffTime = do
+ num <- double
+ guard (num > 0)
+ skipSpace
+ lengthSpecifier <- "d" <|> "w" <|> "m" <|> "y"
+ let days = realToFrac num * nominalDay
+ case lengthSpecifier of
+ "d" -> pure days
+ "w" -> pure (days * 7)
+ "m" -> pure (days * 30.437) -- Average month length
+ "y" -> pure (days * 365.25) -- Average year length
+ _ -> fail "Impossible since lengthSpecifier possibilities are known at compile time"
diff --git a/src/Distribution/Server/Features/Html.hs b/src/Distribution/Server/Features/Html.hs
index 566a222db..067a63ba3 100644
--- a/src/Distribution/Server/Features/Html.hs
+++ b/src/Distribution/Server/Features/Html.hs
@@ -21,7 +21,6 @@ import Distribution.Server.Features.Users
import Distribution.Server.Features.DownloadCount
import Distribution.Server.Features.Votes
import Distribution.Server.Features.Search
-import Distribution.Server.Features.Search as Search
import Distribution.Server.Features.PreferredVersions
-- [reverse index disabled] import Distribution.Server.Features.ReverseDependencies
import Distribution.Server.Features.PackageContents (PackageContentsFeature(..))
@@ -61,17 +60,13 @@ import Distribution.Package
import Distribution.Version
import Distribution.Text (display)
-import Data.Char (toLower)
import Data.List (intercalate, intersperse, insert)
import Data.Function (on)
import qualified Data.Map as Map
import qualified Data.Set as Set
import qualified Data.Vector as Vec
import qualified Data.Text as T
-import Data.Array (Array, listArray)
-import qualified Data.Array as Array
-import qualified Data.Ix as Ix
-import qualified Data.ByteString.Lazy.Char8 as BS (ByteString, pack)
+import qualified Data.ByteString.Lazy.Char8 as BS (ByteString)
import qualified Network.URI as URI
import Text.XHtml.Strict
@@ -133,10 +128,11 @@ initHtmlFeature env@ServerEnv{serverTemplatesDir, serverTemplatesMode,
, "tag-edit.html"
, "candidate-page.html"
, "candidate-index.html"
+ , "browse.html"
]
- return $ \user@UserFeature{groupChangedHook} core@CoreFeature{packageChangeHook}
+ return $ \user core@CoreFeature{packageChangeHook}
packages upload
candidates versions
-- [reverse index disabled] reverse
@@ -150,7 +146,7 @@ initHtmlFeature env@ServerEnv{serverTemplatesDir, serverTemplatesMode,
reportsCore
usersdetails -> do
-- do rec, tie the knot
- rec let (feature, packageIndex, packagesPage, browseTable) =
+ rec let (feature, packageIndex, packagesPage) =
htmlFeature env user core
packages upload
candidates versions
@@ -163,7 +159,7 @@ initHtmlFeature env@ServerEnv{serverTemplatesDir, serverTemplatesMode,
reportsCore
usersdetails
(htmlUtilities core candidates tags user)
- mainCache namesCache browseCache
+ mainCache namesCache
templates
-- Index page caches
@@ -181,23 +177,13 @@ initHtmlFeature env@ServerEnv{serverTemplatesDir, serverTemplatesMode,
asyncCacheUpdateDelay = serverCacheDelay,
asyncCacheLogVerbosity = verbosity
}
- browseCache <- newAsyncCacheNF browseTable
- defaultAsyncCachePolicy {
- asyncCacheName = "browse packages",
- asyncCacheUpdateDelay = serverCacheDelay,
- asyncCacheLogVerbosity = verbosity
- }
registerHook itemUpdate $ \_ -> do
prodAsyncCache mainCache "item update"
prodAsyncCache namesCache "item update"
- prodAsyncCache browseCache "item update"
registerHook packageChangeHook $ \_ -> do
prodAsyncCache mainCache "package change"
prodAsyncCache namesCache "package change"
- prodAsyncCache browseCache "package change"
- registerHook groupChangedHook $ \_ -> do
- prodAsyncCache browseCache "package change"
return feature
@@ -223,9 +209,8 @@ htmlFeature :: ServerEnv
-> HtmlUtilities
-> AsyncCache Response
-> AsyncCache Response
- -> AsyncCache BS.ByteString
-> Templates
- -> (HtmlFeature, IO Response, IO Response, IO BS.ByteString)
+ -> (HtmlFeature, IO Response, IO Response)
htmlFeature env@ServerEnv{..}
user
@@ -235,7 +220,7 @@ htmlFeature env@ServerEnv{..}
-- [reverse index disabled] ReverseFeature{..}
tags download
rank
- list@ListFeature{getAllLists, makeItemList}
+ list@ListFeature{getAllLists}
names
mirror distros
docsCore docsCandidates
@@ -243,9 +228,9 @@ htmlFeature env@ServerEnv{..}
reportsCore
usersdetails
utilities@HtmlUtilities{..}
- cachePackagesPage cacheNamesPage cacheBrowseTable
+ cachePackagesPage cacheNamesPage
templates
- = (HtmlFeature{..}, packageIndex, packagesPage, browseTable)
+ = (HtmlFeature{..}, packageIndex, packagesPage)
where
htmlFeatureInterface = (emptyHackageFeature "html") {
featureResources = htmlResources
@@ -259,10 +244,6 @@ htmlFeature env@ServerEnv{..}
cacheDesc = "packages page by name",
getCacheMemSize = memSize <$> readAsyncCache cacheNamesPage
}
- , CacheComponent {
- cacheDesc = "package browse page",
- getCacheMemSize = memSize <$> readAsyncCache cacheBrowseTable
- }
]
, featurePostInit = syncAsyncCache cachePackagesPage
, featureReloadFiles = reloadTemplates templates
@@ -286,7 +267,6 @@ htmlFeature env@ServerEnv{..}
htmlPreferred
cachePackagesPage
cacheNamesPage
- cacheBrowseTable
templates
names
candidates
@@ -300,7 +280,6 @@ htmlFeature env@ServerEnv{..}
candidates templates
htmlPreferred = mkHtmlPreferred utilities core versions
htmlTags = mkHtmlTags utilities core upload user list tags templates
- htmlSearch = mkHtmlSearch utilities core list names cacheBrowseTable templates
htmlResources = concat [
htmlCoreResources htmlCore
@@ -312,7 +291,6 @@ htmlFeature env@ServerEnv{..}
, htmlPreferredResources htmlPreferred
, htmlDownloadsResources htmlDownloads
, htmlTagsResources htmlTags
- , htmlSearchResources htmlSearch
-- and user groups. package maintainers, trustees, admins
, htmlGroupResource user (maintainersGroupResource . uploadResource $ upload)
, htmlGroupResource user (trusteesGroupResource . uploadResource $ upload)
@@ -435,15 +413,6 @@ htmlFeature env@ServerEnv{..}
]
return htmlpage
- browseTable :: IO BS.ByteString
- browseTable = do
- pkgIndex <- queryGetPackageIndex
- let packageNames = sortOn (map toLower . unPackageName) $ Pages.toPackageNames pkgIndex
- pkgDetails <- makeItemList packageNames
- let rowList = map makeRow pkgDetails
- tabledata = "" +++ rowList +++ ""
- return . BS.pack . showHtmlFragment $ tabledata
-
{-
-- Currently unused, mainly because not all web browsers use eager authentication-sending
-- Setting a cookie might work here, albeit one that's stateless for the server, is not
@@ -489,7 +458,6 @@ mkHtmlCore :: ServerEnv
-> HtmlPreferred
-> AsyncCache Response
-> AsyncCache Response
- -> AsyncCache BS.ByteString
-> Templates
-> SearchFeature
-> PackageCandidatesFeature
@@ -516,7 +484,6 @@ mkHtmlCore ServerEnv{serverBaseURI, serverBlobStore}
HtmlPreferred{..}
cachePackagesPage
cacheNamesPage
- cacheBrowseTable
templates
SearchFeature{..}
PackageCandidatesFeature{..}
@@ -552,6 +519,9 @@ mkHtmlCore ServerEnv{serverBaseURI, serverBlobStore}
resourceDesc = [(GET, "Show browsable list of all packages")]
, resourceGet = [("html", serveBrowsePage)]
}
+ , (extendResource searchPackagesResource) {
+ resourceGet = [("html", serveBrowsePage)]
+ }
, (extendResource $ corePackagesPage cores) {
resourceDesc = [(GET, "Show package index")]
, resourceGet = [("html", const $ readAsyncCache cachePackagesPage)]
@@ -568,12 +538,9 @@ mkHtmlCore ServerEnv{serverBaseURI, serverBlobStore}
serveBrowsePage :: DynamicPath -> ServerPartE Response
serveBrowsePage _dpath = do
- template <- getTemplate templates "table-interface.html"
- tabledata <- readAsyncCache cacheBrowseTable
+ template <- getTemplate templates "browse.html"
return $ toResponse $ template
- [ "heading" $= "All packages"
- , templateUnescaped "tabledata" tabledata]
-
+ [ "heading" $= "Browse and search packages" ]
-- Currently the main package page is thrown together by querying a bunch
-- of features about their attributes for the given package. It'll need
@@ -1802,214 +1769,6 @@ mkHtmlTags HtmlUtilities{..}
tagInPath :: forall m a. (MonadPlus m, FromReqURI a) => DynamicPath -> m a
tagInPath dpath = maybe mzero return (lookup "tag" dpath >>= fromReqURI)
-
-{-------------------------------------------------------------------------------
- Search
--------------------------------------------------------------------------------}
-
-data HtmlSearch = HtmlSearch {
- htmlSearchResources :: [Resource]
- }
-
-mkHtmlSearch :: HtmlUtilities
- -> CoreFeature
- -> ListFeature
- -> SearchFeature
- -> AsyncCache BS.ByteString
- -> Templates
- -> HtmlSearch
-mkHtmlSearch HtmlUtilities{..}
- CoreFeature{..}
- ListFeature{makeItemList}
- SearchFeature{..}
- cacheBrowseTable
- templates =
- HtmlSearch{..}
- where
- htmlSearchResources = [
- (extendResource searchPackagesResource) {
- resourceGet = [("html", servePackageFind)]
- }
- ]
-
- servePackageFind :: DynamicPath -> ServerPartE Response
- servePackageFind _ = do
- (mtermsStr, mexplain) <-
- queryString $ (,) <$> optional (look "terms")
- <*> optional (look "explain")
- let explain = isJust mexplain
- case mtermsStr of
- Just termsStr | explain
- , terms <- words termsStr, not (null terms) -> do
- params <- queryString getSearchRankParameters
- results <- searchPackagesExplain params terms
- return $ toResponse $ Resource.XHtml $
- hackagePage "Package search" $
- [ toHtml $ paramsForm params termsStr
- , resetParamsForm termsStr
- , toHtml $ explainResults results
- ]
-
- Just termsStr | terms <- words termsStr -> do
- tabledata <- if null terms
- then readAsyncCache cacheBrowseTable
- else do
- names <- searchPackages terms
- pkgDetails <- liftIO $ makeItemList names
- let rowList = map makeRow pkgDetails
- return . BS.pack . showHtmlFragment $ "" +++ rowList
- template <- getTemplate templates "table-interface.html"
- return $ toResponse $ template
- [ "heading" $= toHtml (searchForm termsStr False)
- , templateUnescaped "tabledata" tabledata
- , "footer" $= alternativeSearchTerms termsStr]
-
- _ ->
- return $ toResponse $ Resource.XHtml $
- hackagePage "Text search" $
- [ toHtml $ searchForm "" explain
- , alternativeSearch
- ]
- where
- searchForm termsStr explain =
- [ h2 << "Package search"
- , form ! [XHtml.method "GET", action "/packages/search"] <<
- [ input ! [value termsStr, name "terms", identifier "terms"]
- , toHtml " "
- , input ! [thetype "submit", value "Search"]
- , if explain then input ! [thetype "hidden", name "explain"]
- else noHtml
- ]
- ]
-
- alternativeSearch =
- paragraph <<
- [ toHtml "Alternatively, if you are looking for a particular function then try "
- , anchor ! [href hoogleBaseLink] << "Hoogle"
- ]
- alternativeSearchTerms termsStr =
- paragraph <<
- [ toHtml "Alternatively, if you are looking for a particular function then try "
- , anchor ! [href (hoogleLink termsStr)] << "Hoogle"
- ]
- hoogleBaseLink = "http://www.haskell.org/hoogle/"
- hoogleLink termsStr = "http://www.haskell.org/hoogle/?hoogle=" <> termsStr
-
- explainResults :: (Maybe PackageName, [(Search.Explanation PkgDocField PkgDocFeatures T.Text, PackageName)]) -> [Html]
- explainResults (exactMatch, results) =
- [ h2 << "Results"
- , h3 << "Exact Matches"
- , maybe noHtml (toHtml . display) exactMatch
- , case results of
- [] -> noHtml
- ((explanation1, _):_) ->
- table ! [ border 1 ] <<
- ( ( tr << tableHeader explanation1)
- : [ tr << tableRow explanation pkgname
- | (explanation, pkgname) <- results ])
- ]
- where
- tableHeader Search.Explanation{..} =
- [ th << "package", th << "overall score" ]
- ++ [ th << (show term ++ " score")
- | (term, _score) <- termScores ]
- ++ [ th << (show term ++ " " ++ show field ++ " score")
- | (term, fieldScores) <- termFieldScores
- , (field, _score) <- fieldScores ]
- ++ [ th << (show feature ++ " score")
- | (feature, _score) <- nonTermScores ]
-
- tableRow Search.Explanation{..} pkgname =
- [ td << display pkgname, td << show overallScore ]
- ++ [ td << show score
- | (_term, score) <- termScores ]
- ++ [ td << show score
- | (_term, fieldScores) <- termFieldScores
- , (_field, score) <- fieldScores ]
- ++ [ td << show score
- | (_feature, score) <- nonTermScores ]
-
- getSearchRankParameters = do
- let defaults = defaultSearchRankParameters
- k1 <- lookRead "k1" `mplus` pure (paramK1 defaults)
- bs <- sequence
- [ lookRead ("b" ++ show field)
- `mplus` pure (paramB defaults field)
- | field <- Ix.range (minBound, maxBound :: PkgDocField) ]
- ws <- sequence
- [ lookRead ("w" ++ show field)
- `mplus` pure (paramFieldWeights defaults field)
- | field <- Ix.range (minBound, maxBound :: PkgDocField) ]
- fs <- sequence
- [ lookRead ("w" ++ show feature)
- `mplus` pure (paramFeatureWeights defaults feature)
- | feature <- Ix.range (minBound, maxBound :: PkgDocFeatures) ]
- let barr, warr :: Array PkgDocField Float
- barr = listArray (minBound, maxBound) bs
- warr = listArray (minBound, maxBound) ws
- farr = listArray (minBound, maxBound) fs
- return defaults {
- paramK1 = k1,
- paramB = (barr Array.!),
- paramFieldWeights = (warr Array.!),
- paramFeatureWeights = (farr Array.!)
- }
-
- paramsForm SearchRankParameters{..} termsStr =
- [ h2 << "Package search (tuning & explanation)"
- , form ! [XHtml.method "GET", action "/packages/search"] <<
- [ input ! [value termsStr, name "terms", identifier "terms"]
- , toHtml " "
- , input ! [thetype "submit", value "Search"]
- , input ! [thetype "hidden", name "explain"]
- , simpleTable [] [] $
- makeInput [thetype "text", value (show paramK1)] "k1" "K1 parameter"
- : [ makeInput [thetype "text", value (show (paramB field))]
- ("b" ++ fieldname)
- ("B param for " ++ fieldname)
- ++ makeInput [thetype "text", value (show (paramFieldWeights field)) ]
- ("w" ++ fieldname)
- ("Weight for " ++ fieldname)
- | field <- Ix.range (minBound, maxBound :: PkgDocField)
- , let fieldname = show field
- ]
- ++ [ makeInput [thetype "text", value (show (paramFeatureWeights feature)) ]
- ("w" ++ featurename)
- ("Weight for " ++ featurename)
- | feature <- Ix.range (minBound, maxBound :: PkgDocFeatures)
- , let featurename = show feature ]
- ]
- ]
- resetParamsForm termsStr =
- let SearchRankParameters{..} = defaultSearchRankParameters in
- form ! [XHtml.method "GET", action "/packages/search"] <<
- (concat $
- [ input ! [ thetype "submit", value "Reset parameters" ]
- , input ! [ thetype "hidden", name "terms", value termsStr ]
- , input ! [ thetype "hidden", name "explain" ]
- , input ! [ thetype "hidden", name "k1", value (show paramK1) ] ]
- : [ [ input ! [ thetype "hidden"
- , name ("b" ++ fieldname)
- , value (show (paramB field))
- ]
- , input ! [ thetype "hidden"
- , name ("w" ++ fieldname)
- , value (show (paramFieldWeights field))
- ]
- ]
- | field <- Ix.range (minBound, maxBound :: PkgDocField)
- , let fieldname = show field
- ]
- ++ [ [ input ! [ thetype "hidden"
- , name ("w" ++ featurename)
- , value (show (paramFeatureWeights feature))
- ]
- ]
- | feature <- Ix.range (minBound, maxBound :: PkgDocFeatures)
- , let featurename = show feature
- ])
-
-
{-------------------------------------------------------------------------------
Groups
-------------------------------------------------------------------------------}
diff --git a/src/Distribution/Server/Features/Search.hs b/src/Distribution/Server/Features/Search.hs
index 01b661075..4053375a9 100644
--- a/src/Distribution/Server/Features/Search.hs
+++ b/src/Distribution/Server/Features/Search.hs
@@ -1,13 +1,7 @@
-{-# LANGUAGE RankNTypes, NamedFieldPuns, RecordWildCards, RecursiveDo #-}
+{-# LANGUAGE RankNTypes, NamedFieldPuns, RecordWildCards #-}
module Distribution.Server.Features.Search (
- SearchFeature(..),
+ SearchFeature(SearchFeature, searchPackages, searchPackagesResource),
initSearchFeature,
-
- -- * Search parameters
- defaultSearchRankParameters,
- SearchEngine.SearchRankParameters(..),
- PkgDocField, PkgDocFeatures,
- BM25F.Explanation(..),
) where
import Distribution.Server.Framework
@@ -17,9 +11,7 @@ import Distribution.Server.Features.Core
import Distribution.Server.Features.PackageList
import Distribution.Server.Features.Search.PkgSearch
-import Distribution.Server.Features.Search.SearchEngine (SearchRankParameters(..))
import qualified Distribution.Server.Features.Search.SearchEngine as SearchEngine
-import qualified Distribution.Server.Features.Search.BM25F as BM25F
import qualified Distribution.Server.Packages.PackageIndex as PackageIndex
import Distribution.Server.Packages.Types
@@ -39,13 +31,7 @@ data SearchFeature = SearchFeature {
searchPackagesResource :: Resource,
- searchPackages :: forall m. MonadIO m => [String] -> m [PackageName],
- searchPackagesExplain :: forall m. MonadIO m
- => SearchRankParameters PkgDocField PkgDocFeatures
- -> [String]
- -> m (Maybe PackageName,
- [(BM25F.Explanation PkgDocField PkgDocFeatures T.Text
- ,PackageName)])
+ searchPackages :: forall m. MonadIO m => [String] -> m [PackageName]
}
instance IsHackageFeature SearchFeature where
@@ -157,17 +143,6 @@ searchFeature ServerEnv{serverBaseURI} CoreFeature{..} ListFeature{getAllLists}
let results = SearchEngine.query se (map T.pack terms)
return results
- searchPackagesExplain :: MonadIO m
- => SearchRankParameters PkgDocField PkgDocFeatures
- -> [String]
- -> m (Maybe PackageName, [(BM25F.Explanation PkgDocField PkgDocFeatures T.Text, PackageName)])
- searchPackagesExplain params terms = do
- se <- readMemState searchEngineState
- let results = SearchEngine.queryExplain
- (SearchEngine.setRankParams params se)
- (map T.pack terms)
- return results
-
handlerGetOpenSearch :: DynamicPath -> ServerPartE Response
handlerGetOpenSearch _ = do
template <- getTemplate templates "opensearch.xml"
diff --git a/src/Distribution/Server/Features/Search/BM25F.hs b/src/Distribution/Server/Features/Search/BM25F.hs
index 1b4189dbf..817bd2e21 100644
--- a/src/Distribution/Server/Features/Search/BM25F.hs
+++ b/src/Distribution/Server/Features/Search/BM25F.hs
@@ -13,9 +13,6 @@ module Distribution.Server.Features.Search.BM25F (
FeatureFunction(..),
Doc(..),
score,
-
- Explanation(..),
- explain,
) where
import Data.Ix
@@ -113,79 +110,3 @@ applyFeatureFunction :: FeatureFunction -> (Float -> Float)
applyFeatureFunction (LogarithmicFunction p1) = \fi -> log (p1 + fi)
applyFeatureFunction (RationalFunction p1) = \fi -> fi / (p1 + fi)
applyFeatureFunction (SigmoidFunction p1 p2) = \fi -> 1 / (p1 + exp (-fi * p2))
-
-
-------------------
--- Explanation
---
-
--- | A breakdown of the BM25F score, to explain somewhat how it relates to
--- the inputs, and so you can compare the scores of different documents.
---
-data Explanation field feature term = Explanation {
- -- | The overall score is the sum of the 'termScores', 'positionScore'
- -- and 'nonTermScore'
- overallScore :: Float,
-
- -- | There is a score contribution from each query term. This is the
- -- score for the term across all fields in the document (but see
- -- 'termFieldScores').
- termScores :: [(term, Float)],
-{-
- -- | There is a score contribution for positional information. Terms
- -- appearing in the document close together give a bonus.
- positionScore :: [(field, Float)],
--}
- -- | The document can have an inate bonus score independent of the terms
- -- in the query. For example this might be a popularity score.
- nonTermScores :: [(feature, Float)],
-
- -- | This does /not/ contribute to the 'overallScore'. It is an
- -- indication of how the 'termScores' relates to per-field scores.
- -- Note however that the term score for all fields is /not/ simply
- -- sum of the per-field scores. The point of the BM25F scoring function
- -- is that a linear combination of per-field scores is wrong, and BM25F
- -- does a more cunning non-linear combination.
- --
- -- However, it is still useful as an indication to see scores for each
- -- field for a term, to see how the compare.
- --
- termFieldScores :: [(term, [(field, Float)])]
- }
- deriving Show
-
-instance Functor (Explanation field feature) where
- fmap f e@Explanation{..} =
- e {
- termScores = [ (f t, s) | (t, s) <- termScores ],
- termFieldScores = [ (f t, fs) | (t, fs) <- termFieldScores ]
- }
-
-explain :: (Ix field, Bounded field, Ix feature, Bounded feature) =>
- Context term field feature ->
- Doc term field feature -> [term] -> Explanation field feature term
-explain ctx doc ts =
- Explanation {..}
- where
- overallScore = sum (map snd termScores)
--- + sum (map snd positionScore)
- + sum (map snd nonTermScores)
- termScores = [ (t, weightedTermScore ctx doc t) | t <- ts ]
--- positionScore = [ (f, 0) | f <- range (minBound, maxBound) ]
- nonTermScores = [ (feature, weightedNonTermScore ctx doc feature)
- | feature <- range (minBound, maxBound) ]
-
- termFieldScores =
- [ (t, fieldScores)
- | t <- ts
- , let fieldScores =
- [ (f, weightedTermScore ctx' doc t)
- | f <- range (minBound, maxBound)
- , let ctx' = ctx { fieldWeight = fieldWeightOnly f }
- ]
- ]
- fieldWeightOnly f f' | sameField f f' = fieldWeight ctx f'
- | otherwise = 0
-
- sameField f f' = index (minBound, maxBound) f
- == index (minBound, maxBound) f'
diff --git a/src/Distribution/Server/Features/Search/PkgSearch.hs b/src/Distribution/Server/Features/Search/PkgSearch.hs
index 0e1fd8832..46c46d571 100644
--- a/src/Distribution/Server/Features/Search/PkgSearch.hs
+++ b/src/Distribution/Server/Features/Search/PkgSearch.hs
@@ -3,9 +3,6 @@
module Distribution.Server.Features.Search.PkgSearch (
PkgSearchEngine,
initialPkgSearchEngine,
- defaultSearchRankParameters,
- PkgDocField(..),
- PkgDocFeatures,
) where
import Distribution.Server.Features.Search.SearchEngine
diff --git a/src/Distribution/Server/Features/Search/SearchEngine.hs b/src/Distribution/Server/Features/Search/SearchEngine.hs
index f2cfa4078..e060e54c7 100644
--- a/src/Distribution/Server/Features/Search/SearchEngine.hs
+++ b/src/Distribution/Server/Features/Search/SearchEngine.hs
@@ -12,14 +12,6 @@ module Distribution.Server.Features.Search.SearchEngine (
insertDocs,
deleteDoc,
query,
-
- NoFeatures,
- noFeatures,
-
- queryExplain,
- BM25F.Explanation(..),
- setRankParams,
-
invariant,
) where
@@ -96,21 +88,6 @@ initSearchEngine config params =
bm25Context = undefined
}
-setRankParams :: SearchRankParameters field feature ->
- SearchEngine doc key field feature ->
- SearchEngine doc key field feature
-setRankParams params@SearchRankParameters{..} se =
- se {
- searchRankParams = params,
- bm25Context = (bm25Context se) {
- BM25F.paramK1 = paramK1,
- BM25F.paramB = paramB,
- BM25F.fieldWeight = paramFieldWeights,
- BM25F.featureWeight = paramFeatureWeights,
- BM25F.featureFunction = paramFeatureFunctions
- }
- }
-
invariant :: (Ord key, Ix field, Bounded field) =>
SearchEngine doc key field feature -> Bool
invariant SearchEngine{searchIndex} =
@@ -323,83 +300,5 @@ pruneRelevantResults softLimit hardLimit =
-----------------------------
-queryExplain :: (Ix field, Bounded field, Ix feature, Bounded feature, Ord key) =>
- SearchEngine doc key field feature ->
- [Term] -> (Maybe key, [(BM25F.Explanation field feature Term, key)])
-queryExplain se@SearchEngine{ searchIndex,
- searchConfig = SearchConfig{transformQueryTerm, makeKey},
- searchRankParams = SearchRankParameters{..} }
- terms =
-
- -- See 'query' above for explanation. Really we ought to combine them.
- let lookupTerms :: [Term]
- lookupTerms = [ term'
- | term <- terms
- , let transformForField = transformQueryTerm term
- , term' <- nub [ transformForField field
- | field <- range (minBound, maxBound) ]
- ]
-
- exactMatch :: Maybe DocId
- exactMatch = case terms of
- [] -> Nothing
- [x] -> SI.lookupDocKeyReal searchIndex (makeKey x)
- (_:_) -> Nothing
-
- rawresults :: [Maybe (TermId, DocIdSet)]
- rawresults = map (SI.lookupTerm searchIndex) lookupTerms
-
- termids :: [TermId]
- docidsets :: [DocIdSet]
- (termids, docidsets) = unzip (catMaybes rawresults)
-
- unrankedResults :: DocIdSet
- unrankedResults = pruneRelevantResults
- paramResultsetSoftLimit
- paramResultsetHardLimit
- docidsets
-
- in ( fmap (SI.lookupDocId' searchIndex) exactMatch
- , rankExplainResults se termids (DocIdSet.toList unrankedResults)
- )
-
-rankExplainResults :: (Ix field, Bounded field, Ix feature, Bounded feature) =>
- SearchEngine doc key field feature ->
- [TermId] ->
- [DocId] ->
- [(BM25F.Explanation field feature Term, key)]
-rankExplainResults se@SearchEngine{searchIndex} queryTerms docids =
- sortBy (flip compare `on` (BM25F.overallScore . fst))
- [ (explainRelevanceScore se queryTerms doctermids docfeatvals, dockey)
- | docid <- docids
- , let (dockey, doctermids, docfeatvals) = SI.lookupDocId searchIndex docid ]
-
-explainRelevanceScore :: (Ix field, Bounded field, Ix feature, Bounded feature) =>
- SearchEngine doc key field feature ->
- [TermId] ->
- DocTermIds field ->
- DocFeatVals feature ->
- BM25F.Explanation field feature Term
-explainRelevanceScore SearchEngine{bm25Context, searchIndex}
- queryTerms doctermids docfeatvals =
- fmap (SI.getTerm searchIndex) (BM25F.explain bm25Context doc queryTerms)
- where
- doc = indexDocToBM25Doc doctermids docfeatvals
-
------------------------------
-
-data NoFeatures = NoFeatures
- deriving (Eq, Ord, Bounded)
-
-instance Ix NoFeatures where
- range _ = []
- inRange _ _ = False
- index _ _ = -1
-
-noFeatures :: NoFeatures -> a
-noFeatures _ = error "noFeatures"
-
------------------------------
-
instance MemSize key => MemSize (SearchEngine doc key field feature) where
memSize SearchEngine {searchIndex} = 25 + memSize searchIndex
diff --git a/tests/BrowseQueryParserTest.hs b/tests/BrowseQueryParserTest.hs
new file mode 100644
index 000000000..cf1c88338
--- /dev/null
+++ b/tests/BrowseQueryParserTest.hs
@@ -0,0 +1,73 @@
+{-# LANGUAGE OverloadedStrings, ScopedTypeVariables, BlockArguments #-}
+module Main where
+
+import Prelude hiding (Ordering(..))
+
+import Control.Monad.State.Lazy
+import Data.Attoparsec.Text
+import Data.Text (Text)
+import Data.Time (fromGregorian, nominalDay)
+import System.Exit (die)
+
+import Distribution.Server.Features.Browse.Parsers
+
+assertEqual :: forall a b. (Eq a, Eq b, Show a, Show b) => Either a b -> b -> StateT Int IO ()
+assertEqual actual onlyRight = do
+ let reference :: Either a b
+ reference = Right onlyRight
+ if actual /= reference
+ then do
+ lift do
+ putStrLn "Expected"
+ print reference
+ putStrLn "But got"
+ print actual
+ gotten <- get
+ lift . die $ "Failed test " <> show gotten <> " (zero-indexed)"
+ else modify (+1)
+
+assertParses :: Text -> [Condition] -> StateT Int IO ()
+assertParses searchString = assertEqual (parseOnly conditions searchString)
+
+main :: IO ()
+main = do
+ let inp = " dsa( downloads < 100 )( rating > 5.2) test john (lastUpload /= 2000-02-29)"
+ ref =
+ [ SearchTermCond "dsa"
+ , FilterCond (DownloadsFilter (LT, 100))
+ , FilterCond (RatingFilter (GT, 5.2))
+ , SearchTermCond "test"
+ , SearchTermCond "john"
+ , FilterCond (LastUploadFilter (NEQ, fromGregorian 2000 2 29))
+ ]
+ in flip evalStateT 0 do
+ assertEqual (parseOnly searchTerms "test donkey") [ SearchTermCond "test", SearchTermCond "donkey" ]
+ assertEqual (parseOnly filterOrSearchTerms "(test donkey)") [ SearchTermCond "test", SearchTermCond "donkey" ]
+ assertParses "(test donkey)" [ SearchTermCond "test", SearchTermCond "donkey" ]
+ assertParses "test donkey" [ SearchTermCond "test", SearchTermCond "donkey" ]
+ assertParses "test donkey" [ SearchTermCond "test", SearchTermCond "donkey" ]
+ assertParses "test () donkey" [ SearchTermCond "test", SearchTermCond "donkey" ]
+ assertParses "test (donkey)" [ SearchTermCond "test", SearchTermCond "donkey" ]
+ assertParses "test1 (donkey1)" [ SearchTermCond "test1", SearchTermCond "donkey1" ]
+ assertParses "(test donkey)" [ SearchTermCond "test", SearchTermCond "donkey" ]
+ assertParses "(downloads<=10)" [ FilterCond (DownloadsFilter (LTE, 10)) ]
+ assertParses "(dl<=10)" [ SearchTermCond "dl<=10" ]
+ assertParses "(lastUpload!=9999-12-31)" [FilterCond (LastUploadFilter (NEQ, fromGregorian 9999 12 31))]
+ assertParses "(maintainer:EdwardKmett)" [FilterCond (MaintainerFilter "EdwardKmett")]
+ assertParses "(maintainer:23skidoo)" [FilterCond (MaintainerFilter "23skidoo")]
+ assertParses "(tag:network)" [FilterCond (TagFilter "network")]
+ assertParses "(ageOfLastUpload<5y)" [FilterCond (AgeLastULFilter (LT, nominalDay * 365.25 * 5))]
+ assertParses "(ageOfLastUpload<0.00001d)" [FilterCond (AgeLastULFilter (LT, nominalDay * 0.00001))]
+ assertParses "(rating<=NaN)" [ SearchTermCond "rating<=NaN" ]
+ assertParses "(rating<=-1)" [ SearchTermCond "rating<=-1" ]
+ assertParses "(rating<=-0)" [ FilterCond (RatingFilter (LTE, 0)) ]
+ assertParses "(downloads<-1)" [ SearchTermCond "downloads<-1" ]
+ assertParses "(not maintainer:EdwardKmett)" [ FilterCond (Not (MaintainerFilter "EdwardKmett")) ]
+ assertParses "(not not maintainer:EdwardKmett)" [ SearchTermCond "not", SearchTermCond "not", SearchTermCond "maintainer:EdwardKmett" ]
+ assertParses "(deprecated:true)" [ FilterCond (DeprecatedFilter OnlyDeprecated) ]
+ assertParses "(deprecated:yes)" [ FilterCond (DeprecatedFilter OnlyDeprecated) ]
+ assertParses "(deprecated:false)" [ FilterCond (DeprecatedFilter ExcludeDeprecated) ]
+ assertParses "(deprecated:no)" [ FilterCond (DeprecatedFilter ExcludeDeprecated) ]
+ assertParses "(deprecated:any)" [ FilterCond (DeprecatedFilter Don'tCareAboutDeprecated) ]
+ assertParses "" []
+ assertParses inp ref
diff --git a/tests/Distribution/Server/Packages/UnpackTest.hs b/tests/Distribution/Server/Packages/UnpackTest.hs
index 74f921c10..5db1732f7 100644
--- a/tests/Distribution/Server/Packages/UnpackTest.hs
+++ b/tests/Distribution/Server/Packages/UnpackTest.hs
@@ -18,9 +18,9 @@ deriving instance Eq Tar.FileNameError
deriving instance Eq CombinedTarErrs
-- | Test that check permissions does the right thing
-testPermissions :: FilePath -> -- ^ .tar.gz file to test
- (Tar.Entry -> Maybe CombinedTarErrs) -> -- ^ Converter to create errors if necessary
- Assertion
+testPermissions :: FilePath -- ^ .tar.gz file to test
+ -> (Tar.Entry -> Maybe CombinedTarErrs) -- ^ Converter to create errors if necessary
+ -> Assertion
testPermissions tarPath mangler = do
entries <- return . Tar.read . GZip.decompress =<< BL.readFile tarPath
let mappedEntries = Tar.foldEntries Tar.Next Tar.Done (Tar.Fail . FormatError) entries
diff --git a/tests/PaginationTest.hs b/tests/PaginationTest.hs
new file mode 100644
index 000000000..accd61e16
--- /dev/null
+++ b/tests/PaginationTest.hs
@@ -0,0 +1,38 @@
+module Main where
+
+import Control.Monad (unless)
+import System.Exit (die)
+
+import Distribution.Server.Features.Browse (NumElems(..), PaginationConfig(..), StartIndex(..), paginate)
+
+main :: IO ()
+main = do
+ let res = paginate $ PaginationConfig 10 0
+ unless (res == Just (StartIndex 0, NumElems 10)) $
+ die $ "Mismatch 1 " ++ show res
+
+  -- We don't want to claim that page 0 is ever out of bounds,
+  -- since it is normal to request page 0 of a listing with 0 results.
+ let res = paginate $ PaginationConfig 0 0
+ unless (res == Just (StartIndex 0, NumElems 0)) $
+ die $ "Mismatch 2 " ++ show res
+
+ let res = paginate $ PaginationConfig 10 1
+ unless (res == Nothing) $
+ die $ "Mismatch 3 " ++ show res
+
+ let res = paginate $ PaginationConfig 51 1
+ unless (res == Just (StartIndex 50, NumElems 1)) $
+ die $ "Mismatch 4 " ++ show res
+
+ let res = paginate $ PaginationConfig 9 0
+ unless (res == Just (StartIndex 0, NumElems 9)) $
+ die $ "Mismatch 5 " ++ show res
+
+ let res = paginate $ PaginationConfig 100 0
+ unless (res == Just (StartIndex 0, NumElems 50)) $
+ die $ "Mismatch 6 " ++ show res
+
+ let res = paginate $ PaginationConfig 100 1
+ unless (res == Just (StartIndex 50, NumElems 50)) $
+ die $ "Mismatch 7 " ++ show res