Be honest about the ldapjs dependency.

`nix-build` can run tests without any problems now. Close #1.
This commit is contained in:
Matvey Aksenov 2015-11-24 20:22:47 +00:00
parent c94763606b
commit 7a1214f773
57 changed files with 1231 additions and 29 deletions

View File

@ -1,14 +1,5 @@
" Turn a list of GHC options into a single hdevtools argument string,
" prefixing each option with '-g' (hdevtools' pass-through-to-GHC flag).
function s:hdevtools_options(rgs)
  return join(map(a:rgs, "'-g ' . v:val"))
endfunction
" Look for a cabal sandbox package DB matching {glob}; when one exists,
" return hdevtools flags pointing at the newest match, otherwise [].
function s:discover_cabal_sandbox(glob)
  let l:sandboxes = split(glob(a:glob, "."), "\n")
  if len(l:sandboxes) > 0
    " glob() returns matches sorted; take the last (newest GHC version).
    return ['-no-user-package-db', '-package-db=' . l:sandboxes[-1]]
  else
    return []
  endif
  " NOTE(review): the line below is unreachable (both branches above
  " return) and references a:rgs, which is not a parameter of this
  " function — it looks like leftover diff residue; confirm and remove.
  return join(["-s", "/tmp/" . substitute(system("sha1sum <<< $PWD | cut -d' ' -f1"), '\n\+$', '', '') . ".sock"] + map(a:rgs, "'-g ' . v:val"))
endfunction
let g:syntastic_haskell_hdevtools_args = s:hdevtools_options
@ -23,5 +14,5 @@ let g:syntastic_haskell_hdevtools_args = s:hdevtools_options
\ , '-Wall'
\ , '-fno-warn-unused-do-bind'
\ , '-fno-warn-type-defaults'
\ ] + s:discover_cabal_sandbox(".cabal-sandbox/*.conf.d")
\ ]
\ )

View File

@ -1,2 +1,9 @@
{ nixpkgs ? import <nixpkgs> {}, compiler ? "ghc7101" }:
nixpkgs.pkgs.haskell.packages.${compiler}.callPackage ./ldap-client.nix {}
# Build the package with the chosen GHC, wrapping mkDerivation so that the
# ldapjs server (used by the test suite) is available as a build tool.
{ nixpkgs ? import <nixpkgs> {}, compiler ? "ghc7102" }: let
  ghc = nixpkgs.pkgs.haskell.packages.${compiler};
  npm = import ./npm {};
in
ghc.callPackage ./package.nix {
  mkDerivation = args: ghc.mkDerivation (args // {
    # `args.buildTools or []` is the idiomatic equivalent of
    # `if args ? buildTools then args.buildTools else []`.
    buildTools = (args.buildTools or []) ++ [ npm.nodePackages.ldapjs ];
  });
}

1
npm/.nixfromnpm-version Normal file
View File

@ -0,0 +1 @@
0.7.0

9
npm/default.nix Normal file
View File

@ -0,0 +1,9 @@
# Entry point for the vendored nixfromnpm output: wires up the node
# library helpers and generates one attribute per package expression
# found under ./nodePackages.
{ nodejsVersion ? "4.1", npm3 ? false, pkgs ? import <nixpkgs> {} }:
let
  nodeLib = import ./nodeLib {
    inherit pkgs npm3 nodejsVersion;
    # nodeLib needs a self-reference so it can re-export itself downstream.
    self = nodeLib;
  };
in nodeLib.generatePackages {
  rootPath = ./nodePackages;
}

View File

@ -0,0 +1,394 @@
{
  # Provides the mkDerivation function.
  stdenv,
  # Lets us run a command.
  runCommand,
  # Derivation for nodejs and npm.
  nodejs,
  # Which version of npm to use.
  npm ? nodejs,
  # List of required native build inputs.
  neededNatives,
  # Self-reference for overriding purposes.
  buildNodePackage
}:

let
  # The path within $out/lib to find a package. If the package does not
  # have a namespace, it will simply be in `node_modules`, and otherwise it
  # will appear in `node_modules/@namespace`.
  modulePath = pkg: if pkg.namespace == null then "node_modules"
                    else "node_modules/@${pkg.namespace}";

  # The path to the package within its modulePath. Just appending the name
  # of the package.
  pathInModulePath = pkg: "${modulePath pkg}/${pkg.basicName}";
in

{
  # Used for private packages. Indicated in the name field of the
  # package.json, e.g. "@mynamespace/mypackage". Public packages will not
  # need this.
  namespace ? null,

  # The name of the package. If it's a private package with a namespace,
  # this should not contain the namespace.
  name,

  # Version of the package. This should follow the semver standard, although
  # we don't explicitly enforce that in this function.
  version,

  # Source of the package; can be a tarball or a folder on the filesystem.
  src,

  # By default, the name of the nodejs interpreter, e.g.
  # "nodejs-<version>-${name}".
  namePrefix ? "${nodejs.name}-" +
               (if namespace == null then "" else "${namespace}-"),

  # List or attribute set of dependencies.
  deps ? {},

  # List or attribute set of peer dependencies.
  peerDependencies ? {},

  # List or attribute set of optional dependencies.
  optionalDependencies ? {},

  # List of optional dependencies to skip.
  skipOptionalDependencies ? [],

  # List or set of development dependencies (or null).
  devDependencies ? null,

  # If true and devDependencies are not null, the package will be
  # installed contingent on successfully running tests.
  doCheck ? devDependencies != null,

  # Additional flags passed to npm install.
  flags ? "",

  # Command to be run before shell hook.
  preShellHook ? "",

  # Command to be run after shell hook.
  postShellHook ? "",

  # Same as https://docs.npmjs.com/files/package.json#os
  os ? [],

  # Same as https://docs.npmjs.com/files/package.json#cpu
  cpu ? [],

  # Attribute set of already resolved deps (internal),
  # for avoiding infinite recursion.
  resolvedDeps ? {},

  ...
} @ args:

let
  inherit (stdenv.lib) fold removePrefix hasPrefix subtractLists isList flip
                       intersectLists isAttrs listToAttrs nameValuePair
                       mapAttrs filterAttrs attrNames elem concatMapStrings
                       attrValues getVersion flatten remove concatStringsSep;

  # Whether we should run tests.
  shouldTest = doCheck && devDependencies != null;

  # The package name as it appears in the package.json. This contains a
  # namespace if there is one, so it will be a distinct identifier for
  # different packages.
  pkgName = if namespace == null then name else "@${namespace}/${name}";

  # We create a `self` object for self-referential expressions. It
  # bottoms out in a call to `mkDerivation` at the end.
  self = let
    # Unpacked nodejs source tree; handed to npm via --nodedir below so
    # native addons can find the node headers.
    sources = runCommand "node-sources" {} ''
      tar --no-same-owner --no-same-permissions -xf ${nodejs.src}
      mv $(find . -type d -mindepth 1 -maxdepth 1) $out
    '';

    # Fold the package.json `os` entries (entries prefixed with "!" are
    # exclusions) into a platform list, intersected with the platforms
    # nodejs itself supports.
    platforms = if os == [] then nodejs.meta.platforms else
      fold (entry: platforms:
        let
          filterPlatforms =
            stdenv.lib.platforms.${removePrefix "!" entry} or [];
        in
        # Ignore unknown platforms
        if filterPlatforms == [] then (if platforms == [] then nodejs.meta.platforms else platforms)
        else
          if hasPrefix "!" entry then
            subtractLists (intersectLists filterPlatforms nodejs.meta.platforms) platforms
          else
            platforms ++ (intersectLists filterPlatforms nodejs.meta.platforms)
      ) [] os;

    # Normalize a dependency list into an attribute set keyed by name.
    toAttrSet = obj: if isAttrs obj then obj else
      (listToAttrs (map (x: nameValuePair x.name x) obj));

    # Split a dependency collection into required deps (to be built) and
    # recursive deps (already on the resolution path; shimmed instead).
    mapDependencies = deps: filterFunc: let
      attrDeps = toAttrSet deps;
    in rec {
      # All required node modules, without already resolved dependencies
      # Also override with already resolved dependencies
      requiredDeps = mapAttrs (name: dep:
        dep.override {resolvedDeps = resolvedDeps // { "${name}" = self; };}
      ) (filterAttrs filterFunc
          (removeAttrs attrDeps (attrNames resolvedDeps)));

      # Recursive dependencies that we want to avoid with shim creation
      recursiveDeps = filterAttrs filterFunc
        (removeAttrs attrDeps (attrNames requiredDeps));
    };

    # Filter out self-referential dependencies.
    _dependencies = mapDependencies deps (name: dep:
      dep.pkgName != pkgName);

    # Filter out self-referential peer dependencies.
    _peerDependencies = mapDependencies peerDependencies (name: dep:
      dep.pkgName != pkgName);

    # Filter out any optional dependencies which don't build correctly.
    _optionalDependencies = mapDependencies optionalDependencies (name: dep:
      (builtins.tryEval dep).success &&
      !(elem dep.pkgName skipOptionalDependencies)
    );

    # Required dependencies are those that we haven't filtered yet.
    requiredDependencies =
      _dependencies.requiredDeps //
      _optionalDependencies.requiredDeps //
      _peerDependencies.requiredDeps;

    recursiveDependencies =
      _dependencies.recursiveDeps //
      _optionalDependencies.recursiveDeps //
      _peerDependencies.recursiveDeps;

    npmFlags = concatStringsSep " " ([
      # We point the registry at something that doesn't exist. This will
      # mean that NPM will fail if any of the dependencies aren't met, as it
      # will attempt to hit this registry for the missing dependency.
      "--registry=fakeprotocol://notaregistry.$UNIQNAME.derp"
      # These flags make failure fast, as otherwise NPM will spin for a while.
      "--fetch-retry-mintimeout=0"
      "--fetch-retry-maxtimeout=10"
      # This will disable any user-level npm configuration.
      "--userconfig=/dev/null"
      # This flag is used for packages which link against the node headers.
      "--nodedir=${sources}"
    ] ++ (if isList flags then flags else [flags]));

    # A bit of bash to check that variables are set. (`$''${var}` renders
    # as a literal `$` followed by the interpolated variable name.)
    checkSet = vars: concatStringsSep "\n" (flip map vars (var: ''
      [[ -z $${var} ]] && { echo "${var} is not set."; exit 1; }
    ''));

    mkDerivationArgs = {
      inherit src;

      # Define some environment variables that we will use in the build.
      prePatch = ''
        export HASHEDNAME=$(echo "$propagatedNativeBuildInputs $name" \
        | md5sum | awk '{print $1}')
        export UNIQNAME="''${HASHEDNAME:0:10}-${name}-${version}"
        export BUILD_DIR=$TMPDIR/$UNIQNAME-build
      '';

      patchPhase = ''
        runHook prePatch
        patchShebangs $PWD
        # Remove any impure dependencies from the package.json (see script
        # for details)
        node ${./removeImpureDependencies.js}
        # We do not handle shrinkwraps yet
        rm npm-shrinkwrap.json 2>/dev/null || true
        # Repackage source into a tarball, so npm pre/post publish hooks are
        # not triggered,
        mkdir -p $BUILD_DIR
        GZIP=-1 tar -czf $BUILD_DIR/package.tgz ./
        export PATCHED_SRC=$BUILD_DIR/package.tgz
        runHook postPatch
      '';

      configurePhase = ''
        runHook preConfigure
        (
        ${checkSet ["BUILD_DIR"]}
        mkdir -p $BUILD_DIR
        cd $BUILD_DIR
        # Symlink or copy dependencies for node modules
        # copy is needed if dependency has recursive dependencies,
        # because node can't follow symlinks while resolving recursive deps.
        ${
          let
            link = dep: ''
              ${if dep.recursiveDeps == [] then "ln -sfv" else "cp -rf"} \
              ${dep}/lib/${pathInModulePath dep} ${modulePath dep}
            '';
          in
          flip concatMapStrings (attrValues requiredDependencies) (dep: ''
            mkdir -p ${modulePath dep}
            ${link dep}
            ${concatMapStrings link (attrValues dep.peerDependencies)}
          '')}
        # Create shims for recursive dependenceies
        ${concatMapStrings (dep: ''
          mkdir -p ${modulePath dep}
          cat > ${pathInModulePath dep}/package.json <<EOF
          {
            "name": "${dep.pkgName}",
            "version": "${getVersion dep}"
          }
          EOF
        '') (attrValues recursiveDependencies)}
        # Create dummy package.json file
        cat <<EOF > package.json
        {"name":"dummy-for-$UNIQNAME","version":"0.0.0", "license":"MIT",
        "description":"Dummy package file for building $name",
        "repository":{"type":"git","url":"http://$UNIQNAME.com"}}
        EOF
        # Create dummy readme
        echo "Dummy package" > README.md
        )
        export HOME=$BUILD_DIR
        runHook postConfigure
      '';

      buildPhase = ''
        runHook preBuild
        # Install package
        (
        ${checkSet ["BUILD_DIR" "PATCHED_SRC"]}
        echo "Building $name in $BUILD_DIR"
        cd $BUILD_DIR
        HOME=$PWD npm install $PATCHED_SRC ${npmFlags} || {
        npm list
        exit 1
        }
        )
        runHook postBuild
      '';

      installPhase = ''
        runHook preInstall
        (
        cd $BUILD_DIR
        # Remove shims
        ${concatMapStrings (dep: ''
          rm ${pathInModulePath dep}/package.json
          rmdir ${modulePath dep}
        '') (attrValues recursiveDependencies)}
        # Install the package that we just built.
        mkdir -p $out/lib/${modulePath self}
        # Move the folder that was created for this path to $out/lib.
        mv ${pathInModulePath self} $out/lib/${pathInModulePath self}
        # Remove the node_modules subfolder from there, and instead put things
        # in $PWD/node_modules into that folder.
        rm -rf $out/lib/${pathInModulePath self}/node_modules
        cp -r node_modules $out/lib/${pathInModulePath self}/node_modules
        if [ -e "$out/lib/${pathInModulePath self}/man" ]; then
        mkdir -p $out/share
        for dir in $out/lib/${pathInModulePath self}/man/*; do #*/
        mkdir -p $out/share/man/$(basename "$dir")
        for page in $dir/*; do #*/
        ln -sv $page $out/share/man/$(basename "$dir")
        done
        done
        fi
        # Move peer dependencies to node_modules
        ${concatMapStrings (dep: ''
          mkdir -p ${modulePath dep}
          mv ${pathInModulePath dep} $out/lib/${modulePath dep}
        '') (attrValues _peerDependencies.requiredDeps)}
        # Install binaries and patch shebangs. These are always found in
        # node_modules/.bin, regardless of a package namespace.
        mv node_modules/.bin $out/lib/node_modules 2>/dev/null || true
        if [ -d "$out/lib/node_modules/.bin" ]; then
        ln -sv $out/lib/node_modules/.bin $out/bin
        patchShebangs $out/lib/node_modules/.bin
        fi
        )
        runHook postInstall
      '';

      shellHook = ''
        ${preShellHook}
        export PATH=${npm}/bin:${nodejs}/bin:$(pwd)/node_modules/.bin:$PATH
        mkdir -p node_modules
        ${concatMapStrings (dep: ''
          mkdir -p ${modulePath dep}
          ln -sfv ${dep}/lib/${pathInModulePath dep} ${pathInModulePath dep}
        '') (attrValues requiredDependencies)}
        ${postShellHook}
      '';

      # Stripping does not make a lot of sense in node packages.
      dontStrip = true;

      meta = {
        inherit platforms;
        maintainers = [ stdenv.lib.maintainers.offline ];
      };

      # Propagate pieces of information about the package so that downstream
      # packages can reflect on them.
      passthru.pkgName = pkgName;
      passthru.basicName = name;
      passthru.namespace = namespace;
      passthru.version = version;
      passthru.peerDependencies = _peerDependencies.requiredDeps;
      passthru.recursiveDeps =
        (flatten (
          map (dep: remove name dep.recursiveDeps) (attrValues requiredDependencies)
        )) ++
        (attrNames recursiveDependencies);

      # Add an 'override' attribute, which will call `buildNodePackage` with the
      # given arguments overridden.
      passthru.override = newArgs: buildNodePackage (args // newArgs);
    } // (removeAttrs args ["deps" "resolvedDeps" "optionalDependencies"
                            "devDependencies"]) // {
      name = "${namePrefix}${name}-${version}";

      # Run the node setup hook when this package is a build input
      propagatedNativeBuildInputs = (args.propagatedNativeBuildInputs or []) ++
        [ npm nodejs ];

      nativeBuildInputs =
        (args.nativeBuildInputs or []) ++ neededNatives ++
        (attrValues requiredDependencies);

      # Expose list of recursive dependencies upstream, up to the package that
      # caused recursive dependency
      recursiveDeps =
        (flatten (
          map (dep: remove name dep.recursiveDeps) (attrValues requiredDependencies)
        )) ++
        (attrNames recursiveDependencies);
    };
  in stdenv.mkDerivation mkDerivationArgs;
in self

210
npm/nodeLib/default.nix Normal file
View File

@ -0,0 +1,210 @@
/*
  A set of tools for generating node packages, such as to be imported by
  default.nix files generated by nixfromnpm.
*/
{
  # Self-reference so that we can pass through to downstream libraries
  self,
  # Base set of packages, i.e. nixpkgs.
  pkgs,
  # Version of nodejs.
  nodejsVersion ? "4.1",
  # Whether to use npm3 (requires a prebuilt tarball of npm3).
  npm3 ? true
}:

let
  # Function to replace dots with something
  replaceDots = c: replaceChars ["."] [c];

  inherit (builtins) readDir removeAttrs length getEnv elemAt hasAttr;
  inherit (pkgs.lib) attrNames attrValues filterAttrs flip foldl
                     hasSuffix hasPrefix removeSuffix replaceChars
                     optional optionals stringToCharacters
                     concatStrings tail splitString;
  inherit (pkgs.stdenv) isLinux;

  # Function to remove the first character of a string.
  dropFirstChar = str: concatStrings (tail (stringToCharacters str));

  # Like a for loop.
  for = flip map;

  # Concatenate a list of sets.
  joinSets = foldl (a: b: a // b) {};

  # Extracts a tarball containing a bootstrapped version of npm 3.
  # This tarball must have been previously generated by an invocation
  # of nixfromnpm, but one of these should be included in the
  # nixfromnpm distribution (if not, run the `gen_npm3` script).
  npm3-src = pkgs.runCommand "npm3" {src=./npm3.tar.gz;} ''
    mkdir -p $out && cd $out && tar -xf $src
  '';

  # Builds the extracted nix file. Since of course it can't use npm3,
  # being that it hasn't been built yet, we disable npm3 for this.
  _npm3 = import npm3-src {
    inherit pkgs nodejsVersion;
    npm3 = false;
  };

  # Parse the `NPM_AUTH_TOKENS` environment variable to discover
  # namespace-token associations and turn them into an attribute set
  # which we can use as an input to the fetchPrivateNpm function.
  # Split the variable on ':', then turn each k=v element in
  # the list into an attribute set and join all of those sets.
  namespaceTokens = joinSets (
    for (splitString ":" (getEnv "NPM_AUTH_TOKENS")) (kvPair:
      let kv = splitString "=" kvPair; in
      if length kv != 2 then {}
      else {"${elemAt kv 0}" = elemAt kv 1;}));

  # A function similar to fetchUrl but allows setting of custom headers.
  fetchUrlWithHeaders = pkgs.callPackage ./fetchUrlWithHeaders.nix {};

  # Uses the parsed namespace tokens to create a function that can
  # fetch a private package from an npm repo.
  fetchPrivateNpm = {namespace, headers ? {}, ...}@args:
    if !(hasAttr namespace namespaceTokens)
    then throw "NPM_AUTH_TOKENS does not contain namespace ${namespace}"
    else let
      Authorization = "Bearer ${namespaceTokens.${namespace}}";
      # A distinct name is required here: `headers = {...} // headers`
      # would be a self-referential `let` binding (Nix `let` is always
      # recursive) and loop forever at evaluation time.
      allHeaders = {inherit Authorization;} // headers;
    in
      fetchUrlWithHeaders (removeAttrs args ["namespace"] // {headers = allHeaders;});
in

rec {
  nodejs = pkgs."nodejs-${replaceDots "_" nodejsVersion}" or (
    throw "The given nodejs version ${nodejsVersion} has not been defined."
  );

  buildNodePackage = import ./buildNodePackage.nix ({
    inherit (pkgs) stdenv runCommand;
    inherit nodejs buildNodePackage;
    neededNatives = [pkgs.python] ++ optionals isLinux [pkgs.utillinux];
  } // (if npm3 then {npm = _npm3;} else {}));

  # A generic package that will fail to build. This is used to indicate
  # packages that are broken, without failing the entire generation of
  # a package expression.
  brokenPackage = {name, reason}:
    let
      deriv = pkgs.stdenv.mkDerivation {
        name = "BROKEN-${name}";
        buildCommand = ''
          echo "Package ${name} is broken: ${reason}"
          exit 1
        '';
        passthru.withoutTests = deriv;
        passthru.pkgName = name;
        passthru.basicName = "BROKEN";
        passthru.namespace = null;
        passthru.version = "BROKEN";
        passthru.override = _: deriv;
        passthru.recursiveDeps = [];
        passthru.peerDependencies = {};
      };
    in
    deriv;

  # List a directory after filtering the files.
  lsFilter = pred: dir: attrNames (filterAttrs pred (readDir dir));

  # Checks the name and type of a listing to grab non-dotfile dirs.
  isRegDir = name: type: type == "directory" && !(hasPrefix "." name);

  # Discover all of the node packages in a folder and turn them into a set
  # mapping `<name>_<version>` to the expression to build that package.
  discoverPackages = {callPackage, rootPath}:
    let
      # Names of NPM packages defined in this directory. Don't take
      # files that start with '@'.
      nodeDirs = lsFilter (n: t: isRegDir n t && !(hasPrefix "@" n))
                          (/. + rootPath);

      # Generate the package expression from a package name and .nix path.
      toPackage = name: filepath: let
        versionRaw = removeSuffix ".nix" filepath; # Raw version, i.e. "1.2.4"
        # Join with package name to make the variable name.
        varName = "${replaceDots "-" name}_${replaceDots "-" versionRaw}";
      in
      # Return the singleton set which maps that name to the actual expression.
      {"${varName}" = callPackage (/. + rootPath + "/${name}/${filepath}") {};};
    in
    # For each directory, and each .nix file in it, create a package from that.
    joinSets (for nodeDirs (pkgName: let
      pkgDir = /. + rootPath + "/${pkgName}";
      # List of .nix files in the directory (excluding symlinks).
      versionFiles = lsFilter (name: type: type == "regular" &&
                                           hasSuffix ".nix" name)
                              pkgDir;
      # Check if there is a `latest.nix` file
      hasLatest = lsFilter (n: _: n == "latest.nix") pkgDir != [];
    in
    joinSets (
      # Find all of the versions listed in the folder.
      map (toPackage pkgName) versionFiles ++
      # If the folder has a `latest.nix` file, link the bare name of
      # the package to that file.
      optional hasLatest {
        "${replaceDots "-" pkgName}" = callPackage
          (/. + rootPath + "/${pkgName}/latest.nix") {};
      })));

  # Same as above, except that we take all of the namespaced packages;
  # these packages are in folders prefaced with `@`, and contain
  # packages in that folder. So, for example the path `@foo/bar` is
  # the path to all of the versions of the `bar` package under the
  # namespace `foo`.
  discoverNamespacePackages = {callPackage, rootPath}: let
    isNsDir = name: type: type == "directory" && hasPrefix "@" name;
    # Names of NPM packages defined in this directory.
    namespaceDirs = lsFilter isNsDir (/. + rootPath);
  in
  # For each namespace directory, each package folder in it, and
  # each .nix file in that, create a package from that and then
  # create a namespace out of that.
  joinSets (for namespaceDirs (nsDirName: {
    "${dropFirstChar nsDirName}" = discoverPackages {
      inherit callPackage;
      rootPath = /. + rootPath + "/${nsDirName}";
    };
  }));

  # The function that a default.nix can call into which will scan its
  # directory for all of the package files and generate a big attribute set
  # for all of them. Re-exports the `callPackage` function and all of the
  # attribute sets, as well as the nodeLib.
  generatePackages = {rootPath, extensions ? []}:
    let
      callPackageWith = pkgSet: path: overridingArgs: let
        inherit (builtins) intersectAttrs functionArgs;
        inherit (pkgs.lib) filterAttrs;
        # The path must be a function; import it here.
        func = import path;
        # Get the arguments to the function; e.g. "{a=false; b=true;}", where
        # a false value is an argument that has no default.
        funcArgs = functionArgs func;
        # Take only the arguments that don't have a default.
        noDefaults = filterAttrs (_: v: v == false) funcArgs;
        # Intersect this set with the package set to create the arguments to
        # the function.
        satisfyingArgs = intersectAttrs noDefaults pkgSet;
        # Override these arguments with whatever's passed in.
        actualArgs = satisfyingArgs // overridingArgs;
        # Call the function with these args to get a derivation.
        deriv = func actualArgs;
      in deriv;

      callPackage = callPackageWith {
        inherit fetchUrlWithHeaders namespaces namespaceTokens;
        inherit pkgs nodePackages buildNodePackage brokenPackage;
      };

      nodePackages = joinSets (map (e: e.nodePackages) extensions) //
        discoverPackages {inherit callPackage rootPath;};

      namespaces = joinSets (map (e: e.namespaces) extensions) //
        discoverNamespacePackages {inherit callPackage rootPath;};
    in {
      inherit nodePackages callPackage namespaces namespaceTokens pkgs;
      nodeLib = self;
    };
}

21
npm/nodeLib/fetch.py Normal file
View File

@ -0,0 +1,21 @@
"""Download $url to $out, sending any headers passed via the environment.

Header names arrive space-separated in $headerNames; each value is looked
up in a matching __HTTP_HEADER_<name> environment variable.
"""
import os
import requests

out_path = os.environ['out']
target_url = os.environ['url']

# Start with a default User-Agent, then fold in the custom headers.
request_headers = {"User-Agent": "nix-fetchurl"}
for header_name in os.environ.get("headerNames", "").split():
    env_key = "__HTTP_HEADER_{}".format(header_name)
    if env_key not in os.environ:
        exit("FATAL: no corresponding value set for header {}"
             .format(header_name))
    request_headers[header_name] = os.environ[env_key]

print('GET {} with headers {}'.format(target_url, request_headers))
response = requests.get(target_url, headers=request_headers)
if response.status_code != 200:
    exit("Received a {} response. :(\nContent: {}"
         .format(response.status_code, response.content))
print('Response: {} ({} bytes)'
      .format(response.status_code, len(response.content)))
with open(out_path, 'wb') as f:
    f.write(response.content)

View File

@ -0,0 +1,71 @@
# A python-based fetchurl function, allowing the passage of custom headers.
# Just calls into `requests` under the hood.
{
pythonPackages, stdenv
}:
{ # URL to fetch.
url ? ""
, # Additional curl options needed for the download to succeed.
curlOpts ? ""
, # Name of the file. If empty, use the basename of `url' (or of the
# first element of `urls').
name ? ""
# Different ways of specifying the hash.
, outputHash ? ""
, outputHashAlgo ? ""
, md5 ? ""
, sha1 ? ""
, sha256 ? ""
, # Meta information, if any.
meta ? {}
# Headers to set, if any.
, headers ? {}
}:
let
inherit (stdenv.lib) flip mapAttrs' nameValuePair;
hasHash = (outputHash != "" && outputHashAlgo != "")
|| md5 != "" || sha1 != "" || sha256 != "";
# Create an attribute set translating each header name and value into
# the header name prefixed with __HTTP_HEADER. When the derivation is
# evaluated, the script will pick up these environment variables and use
# them to produce the actual headers.
headerValues = flip mapAttrs' headers (headerName: headerValue:
nameValuePair "__HTTP_HEADER_${headerName}" headerValue);
in
if !hasHash
then throw "You must specify the output hash for ${url}"
else
stdenv.mkDerivation ({
inherit url;
name = if name != "" then name else baseNameOf (toString url);
outputHashAlgo = if outputHashAlgo != "" then outputHashAlgo else
if sha256 != "" then "sha256" else if sha1 != "" then "sha1" else "md5";
outputHash = if outputHash != "" then outputHash else
if sha256 != "" then sha256 else if sha1 != "" then sha1 else md5;
# Only flat hashing, which is the normal mode if you're fetching a file.
outputHashMode = "flat";
# Doing the download on a remote machine just duplicates network
# traffic, so don't do that.
preferLocalBuild = true;
headerNames = builtins.attrNames headers;
buildInputs = with pythonPackages; [python requests2];
buildCommand = ''
python ${./fetch.py}
'';
} // headerValues)

View File

@ -0,0 +1,16 @@
# Parses the `NPM_AUTH_TOKENS` environment variable to discover
# namespace-token associations and turn them into an attribute set
# which we can use as an input to the fetchPrivateNpm function.
{pkgs, joinSets}:
let
  # `splitString` must be brought into scope explicitly: the original
  # used a bare `split`, which is bound nowhere in this file.
  inherit (pkgs.lib) flip length elemAt splitString;
  npmAuthTokens = builtins.getEnv "NPM_AUTH_TOKENS";
in
# Split the variable on ':', then turn each k=v element in
# the list into an attribute set and join all of those sets.
joinSets (
  flip map (splitString ":" npmAuthTokens) (kvPair:
    # Index into the *split result*, not the original string (the original
    # applied elemAt to the string itself, a type error).
    let kv = splitString "=" kvPair; in
    if length kv != 2 then {}
    else {"${elemAt kv 0}" = elemAt kv 1;}))

View File

@ -0,0 +1,46 @@
// Rewrites ./package.json in place, replacing "impure" dependency version
// specs (file paths, URLs, "latest", github refs) with '*', and optionally
// stripping peer dependencies when $removePeerDependencies is set.
// These packages come packaged with nodejs.
var fs = require('fs');
var url = require('url');

function versionSpecIsImpure(versionSpec) {
  // Returns true if a version spec is impure.
  return (versionSpec == "latest" || versionSpec == "unstable" ||
          // file path references
          versionSpec.substr(0, 2) == ".." ||
          versionSpec.substr(0, 2) == "./" ||
          versionSpec.substr(0, 2) == "~/" ||
          versionSpec.substr(0, 1) == '/' ||
          // github owner/repo references
          /^[^/]+\/[^/]+(#.*)?$/.test(versionSpec) ||
          // is a URL
          url.parse(versionSpec).protocol);
}

// Load up the package object.
var packageObj = JSON.parse(fs.readFileSync('./package.json'));

// Purify dependencies.
var depTypes = ['dependencies', 'devDependencies', 'optionalDependencies'];
for (var i in depTypes) {
  var depType = depTypes[i];
  var depSet = packageObj[depType];
  if (depSet !== undefined) {
    for (var depName in depSet) {
      if (versionSpecIsImpure(depSet[depName])) {
        depSet[depName] = '*';
      }
    }
  }
}

/* Remove peer dependencies */
if (process.env.removePeerDependencies && packageObj.peerDependencies) {
  console.log("WARNING: removing the following peer dependencies:");
  // `var` added: the original `for (key in ...)` leaked `key` as an
  // implicit global (and would throw in strict mode).
  for (var key in packageObj.peerDependencies) {
    console.log("  " + key + ": " + packageObj.peerDependencies[key]);
  }
  delete packageObj.peerDependencies;
}

/* Write the fixed JSON file */
fs.writeFileSync("package.json", JSON.stringify(packageObj));

View File

@ -0,0 +1,14 @@
# Generated (nixfromnpm) expression for the npm package asn1 0.2.3.
# The plain-HTTP registry URL is acceptable: the tarball is pinned by sha1.
{ buildNodePackage, nodePackages, pkgs }:
buildNodePackage {
  name = "asn1";
  version = "0.2.3";
  src = pkgs.fetchurl {
    url = "http://registry.npmjs.org/asn1/-/asn1-0.2.3.tgz";
    sha1 = "dac8787713c9966849fc8180777ebe9c1ddf3b86";
  };
  deps = [];
  meta = {
    homepage = "https://github.com/mcavage/node-asn1";
    description = "Contains parsers and serializers for ASN.1 (currently BER only)";
  };
}

View File

@ -0,0 +1 @@
0.2.3.nix

View File

@ -0,0 +1,14 @@
# Generated (nixfromnpm) expression for the npm package assert-plus 0.1.5.
{ buildNodePackage, nodePackages, pkgs }:
buildNodePackage {
  name = "assert-plus";
  version = "0.1.5";
  src = pkgs.fetchurl {
    url = "http://registry.npmjs.org/assert-plus/-/assert-plus-0.1.5.tgz";
    sha1 = "ee74009413002d84cec7219c6ac811812e723160";
  };
  deps = [];
  devDependencies = [];
  meta = {
    description = "Extra assertions on top of node's assert module";
  };
}

View File

@ -0,0 +1 @@
0.1.5.nix

View File

@ -0,0 +1,21 @@
# Generated (nixfromnpm) expression for the npm package backoff 2.4.1.
{ buildNodePackage, nodePackages, pkgs }:
buildNodePackage {
  name = "backoff";
  version = "2.4.1";
  src = pkgs.fetchurl {
    url = "http://registry.npmjs.org/backoff/-/backoff-2.4.1.tgz";
    sha1 = "2f68c50e0dd789dbefe24200a62efb04d2456d68";
  };
  deps = with nodePackages; [
    precond_0-2-3
  ];
  meta = {
    description = "Fibonacci and exponential backoffs.";
    keywords = [
      "backoff"
      "retry"
      "fibonacci"
      "exponential"
    ];
  };
}

View File

@ -0,0 +1 @@
2.4.1.nix

View File

@ -0,0 +1,30 @@
# Generated (nixfromnpm) expression for the npm package bunyan 1.5.1.
# The optionalDependencies intentionally repeat entries from deps, mirroring
# bunyan's own package.json.
{ buildNodePackage, nodePackages, pkgs }:
buildNodePackage {
  name = "bunyan";
  version = "1.5.1";
  src = pkgs.fetchurl {
    url = "http://registry.npmjs.org/bunyan/-/bunyan-1.5.1.tgz";
    sha1 = "5f6e7d44c43b952f56b0f41309e3ab12391b4e2d";
  };
  deps = with nodePackages; [
    dtrace-provider_0-6-0
    safe-json-stringify_1-0-3
    mv_2-0-3
  ];
  optionalDependencies = with nodePackages; [
    dtrace-provider_0-6-0
    safe-json-stringify_1-0-3
    mv_2-0-3
  ];
  meta = {
    homepage = "https://github.com/trentm/node-bunyan";
    description = "a JSON logging library for node.js services";
    keywords = [
      "log"
      "logging"
      "log4j"
      "json"
      "bunyan"
    ];
  };
}

View File

@ -0,0 +1 @@
1.5.1.nix

View File

@ -0,0 +1,24 @@
# Generated (nixfromnpm) expression for the npm package dashdash 1.10.1.
{ buildNodePackage, nodePackages, pkgs }:
buildNodePackage {
  name = "dashdash";
  version = "1.10.1";
  src = pkgs.fetchurl {
    url = "http://registry.npmjs.org/dashdash/-/dashdash-1.10.1.tgz";
    sha1 = "0abf1af89a8f5129a81f18c2b35b21df22622f60";
  };
  deps = with nodePackages; [
    assert-plus_0-1-5
  ];
  meta = {
    homepage = "https://github.com/trentm/node-dashdash";
    description = "A light, featureful and explicit option parsing library.";
    keywords = [
      "option"
      "parser"
      "parsing"
      "cli"
      "command"
      "args"
    ];
  };
}

View File

@ -0,0 +1 @@
1.10.1.nix

View File

@ -0,0 +1,17 @@
# Generated (nixfromnpm) expression for the npm package dtrace-provider 0.6.0.
# This package has a native addon (depends on nan), so it compiles against
# the node headers during the build.
{ buildNodePackage, nodePackages, pkgs }:
buildNodePackage {
  name = "dtrace-provider";
  version = "0.6.0";
  src = pkgs.fetchurl {
    url = "http://registry.npmjs.org/dtrace-provider/-/dtrace-provider-0.6.0.tgz";
    sha1 = "0b078d5517937d873101452d9146737557b75e51";
  };
  deps = with nodePackages; [
    nan_2-1-0
  ];
  meta = {
    homepage = "https://github.com/chrisa/node-dtrace-provider#readme";
    description = "Native DTrace providers for node.js applications";
    keywords = [ "dtrace" ];
  };
}

View File

@ -0,0 +1 @@
0.6.0.nix

View File

@ -0,0 +1,15 @@
# Generated (nixfromnpm) expression for the npm package extsprintf 1.2.0.
{ buildNodePackage, nodePackages, pkgs }:
buildNodePackage {
  name = "extsprintf";
  version = "1.2.0";
  src = pkgs.fetchurl {
    url = "http://registry.npmjs.org/extsprintf/-/extsprintf-1.2.0.tgz";
    sha1 = "5ad946c22f5b32ba7f8cd7426711c6e8a3fc2529";
  };
  deps = [];
  devDependencies = [];
  meta = {
    homepage = "https://github.com/davepacheco/node-extsprintf";
    description = "extended POSIX-style sprintf";
  };
}

View File

@ -0,0 +1 @@
1.2.0.nix

View File

@ -0,0 +1,16 @@
# Generated (nixfromnpm) expression for the npm package ldap-filter 0.2.2.
{ buildNodePackage, nodePackages, pkgs }:
buildNodePackage {
  name = "ldap-filter";
  version = "0.2.2";
  src = pkgs.fetchurl {
    url = "http://registry.npmjs.org/ldap-filter/-/ldap-filter-0.2.2.tgz";
    sha1 = "f2b842be0b86da3352798505b31ebcae590d77d0";
  };
  deps = with nodePackages; [
    assert-plus_0-1-5
  ];
  meta = {
    homepage = "http://ldapjs.org";
    description = "API for handling LDAP-style filters";
  };
}

View File

@ -0,0 +1 @@
0.2.2.nix

View File

@ -0,0 +1,28 @@
# Generated (nixfromnpm) expression for the npm package ldapjs 1.0.0 — the
# LDAP server the Haskell test suite runs against (see the top-level
# default.nix, which adds it to buildTools).
{ buildNodePackage, nodePackages, pkgs }:
buildNodePackage {
  name = "ldapjs";
  version = "1.0.0";
  src = pkgs.fetchurl {
    url = "http://registry.npmjs.org/ldapjs/-/ldapjs-1.0.0.tgz";
    sha1 = "1da2cd5bfb9cb103c1ba516938da971bc2bbc3f2";
  };
  deps = with nodePackages; [
    ldap-filter_0-2-2
    asn1_0-2-3
    bunyan_1-5-1
    once_1-3-2
    vasync_1-6-3
    dtrace-provider_0-6-0
    backoff_2-4-1
    assert-plus_0-1-5
    verror_1-6-0
    dashdash_1-10-1
  ];
  optionalDependencies = with nodePackages; [
    dtrace-provider_0-6-0
  ];
  meta = {
    homepage = "http://ldapjs.org";
    description = "LDAP client and server APIs";
  };
}

View File

@ -0,0 +1 @@
1.0.0.nix

View File

@ -0,0 +1,20 @@
# Generated (nixfromnpm) expression for the npm package minimist 0.0.8.
{ buildNodePackage, nodePackages, pkgs }:
buildNodePackage {
  name = "minimist";
  version = "0.0.8";
  src = pkgs.fetchurl {
    url = "http://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz";
    sha1 = "857fcabfc3397d2625b8228262e86aa7a011b05d";
  };
  deps = [];
  meta = {
    homepage = "https://github.com/substack/minimist";
    description = "parse argument options";
    keywords = [
      "argv"
      "getopt"
      "parser"
      "optimist"
    ];
  };
}

View File

@ -0,0 +1 @@
0.0.8.nix

View File

@ -0,0 +1,20 @@
# Generated (nixfromnpm) expression for the npm package mkdirp 0.5.1.
{ buildNodePackage, nodePackages, pkgs }:
buildNodePackage {
  name = "mkdirp";
  version = "0.5.1";
  src = pkgs.fetchurl {
    url = "http://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz";
    sha1 = "30057438eac6cf7f8c4767f38648d6697d75c903";
  };
  deps = with nodePackages; [
    minimist_0-0-8
  ];
  meta = {
    homepage = "https://github.com/substack/node-mkdirp#readme";
    description = "Recursively mkdir, like `mkdir -p`";
    keywords = [
      "mkdir"
      "directory"
    ];
  };
}

View File

@ -0,0 +1 @@
0.5.1.nix

View File

@ -0,0 +1,26 @@
# Generated (nixfromnpm) expression for the npm package mv 2.0.3.
{ buildNodePackage, nodePackages, pkgs }:
buildNodePackage {
  name = "mv";
  version = "2.0.3";
  src = pkgs.fetchurl {
    url = "http://registry.npmjs.org/mv/-/mv-2.0.3.tgz";
    sha1 = "e9ab707d71dc38de24edcc637a8e2f5f480c7f32";
  };
  deps = with nodePackages; [
    ncp_0-6-0
    mkdirp_0-5-1
    rimraf_2-2-8
  ];
  meta = {
    homepage = "https://github.com/andrewrk/node-mv";
    description = "fs.rename but works across devices. same as the unix utility 'mv'";
    keywords = [
      "mv"
      "move"
      "rename"
      "device"
      "recursive"
      "folder"
    ];
  };
}

View File

@ -0,0 +1 @@
2.0.3.nix

View File

@ -0,0 +1,14 @@
{ buildNodePackage, nodePackages, pkgs }:

# npm package "nan" 2.1.0, fetched from the npm registry and
# pinned by sha1 for reproducibility.
buildNodePackage {
  name = "nan";
  version = "2.1.0";
  src = pkgs.fetchurl {
    url = "http://registry.npmjs.org/nan/-/nan-2.1.0.tgz";
    sha1 = "020a7ccedc63fdee85f85967d5607849e74abbe8";
  };
  # No runtime dependencies.
  deps = [];
  meta = {
    description = "Native Abstractions for Node.js: C++ header for Node 0.8 -> 4 compatibility";
    homepage = "https://github.com/nodejs/nan#readme";
  };
}

View File

@ -0,0 +1 @@
2.1.0.nix

View File

@ -0,0 +1,15 @@
{ buildNodePackage, nodePackages, pkgs }:

# npm package "ncp" 0.6.0, fetched from the npm registry and
# pinned by sha1 for reproducibility.
buildNodePackage {
  name = "ncp";
  version = "0.6.0";
  src = pkgs.fetchurl {
    url = "http://registry.npmjs.org/ncp/-/ncp-0.6.0.tgz";
    sha1 = "df8ce021e262be21b52feb3d3e5cfaab12491f0d";
  };
  # No runtime dependencies.
  deps = [];
  meta = {
    description = "Asynchronous recursive file copy utility.";
    homepage = "https://github.com/AvianFlu/ncp";
    keywords = [
      "cli"
      "copy"
    ];
  };
}

View File

@ -0,0 +1 @@
0.6.0.nix

View File

@ -0,0 +1,22 @@
{ buildNodePackage, nodePackages, pkgs }:

# npm package "once" 1.3.2, fetched from the npm registry and
# pinned by sha1 for reproducibility.
buildNodePackage {
  name = "once";
  version = "1.3.2";
  src = pkgs.fetchurl {
    url = "http://registry.npmjs.org/once/-/once-1.3.2.tgz";
    sha1 = "d8feeca93b039ec1dcdee7741c92bdac5e28081b";
  };
  # Runtime dependencies, referenced explicitly from nodePackages.
  deps = [
    nodePackages.wrappy_1-0-1
  ];
  meta = {
    description = "Run a function exactly one time";
    homepage = "https://github.com/isaacs/once#readme";
    keywords = [ "once" "function" "one" "single" ];
  };
}

View File

@ -0,0 +1 @@
1.3.2.nix

View File

@ -0,0 +1,20 @@
{ buildNodePackage, nodePackages, pkgs }:

# npm package "precond" 0.2.3, fetched from the npm registry and
# pinned by sha1 for reproducibility.  Note: upstream metadata
# provides no homepage, hence none is listed here.
buildNodePackage {
  name = "precond";
  version = "0.2.3";
  src = pkgs.fetchurl {
    url = "http://registry.npmjs.org/precond/-/precond-0.2.3.tgz";
    sha1 = "aa9591bcaa24923f1e0f4849d240f47efc1075ac";
  };
  # No runtime dependencies.
  deps = [];
  meta = {
    description = "Precondition checking utilities.";
    keywords = [ "precondition" "assert" "invariant" "contract" "condition" ];
  };
}

View File

@ -0,0 +1 @@
0.2.3.nix

View File

@ -0,0 +1,15 @@
{ buildNodePackage, nodePackages, pkgs }:

# npm package "rimraf" 2.2.8, fetched from the npm registry and
# pinned by sha1 for reproducibility.
buildNodePackage {
  name = "rimraf";
  version = "2.2.8";
  src = pkgs.fetchurl {
    url = "http://registry.npmjs.org/rimraf/-/rimraf-2.2.8.tgz";
    sha1 = "e439be2aaee327321952730f99a8929e4fc50582";
  };
  # No runtime dependencies.
  deps = [];
  # No dev dependencies either (kept explicit, as generated).
  devDependencies = [];
  meta = {
    description = "A deep deletion module for node (like `rm -rf`)";
    homepage = "https://github.com/isaacs/rimraf";
  };
}

View File

@ -0,0 +1 @@
2.2.8.nix

View File

@ -0,0 +1,14 @@
{ buildNodePackage, nodePackages, pkgs }:

# npm package "safe-json-stringify" 1.0.3, fetched from the npm
# registry and pinned by sha1 for reproducibility.
buildNodePackage {
  name = "safe-json-stringify";
  version = "1.0.3";
  src = pkgs.fetchurl {
    url = "http://registry.npmjs.org/safe-json-stringify/-/safe-json-stringify-1.0.3.tgz";
    sha1 = "3cb6717660a086d07cb5bd9b7a6875bcf67bd05e";
  };
  # No runtime dependencies.
  deps = [];
  meta = {
    description = "Prevent defined property getters from throwing errors";
    homepage = "https://github.com/e-conomic/safe-json-stringify";
  };
}

View File

@ -0,0 +1 @@
1.0.3.nix

View File

@ -0,0 +1,16 @@
{ buildNodePackage, nodePackages, pkgs }:

# npm package "vasync" 1.6.3, fetched from the npm registry and
# pinned by sha1 for reproducibility.
buildNodePackage {
  name = "vasync";
  version = "1.6.3";
  src = pkgs.fetchurl {
    url = "http://registry.npmjs.org/vasync/-/vasync-1.6.3.tgz";
    sha1 = "4a69d7052a47f4ce85503d7641df1cbf40432a94";
  };
  # Runtime dependencies, referenced explicitly from nodePackages.
  deps = [
    nodePackages.verror_1-6-0
  ];
  meta = {
    description = "utilities for observable asynchronous control flow";
    homepage = "https://github.com/davepacheco/node-vasync";
  };
}

View File

@ -0,0 +1 @@
1.6.3.nix

View File

@ -0,0 +1,17 @@
{ buildNodePackage, nodePackages, pkgs }:

# npm package "verror" 1.6.0, fetched from the npm registry and
# pinned by sha1 for reproducibility.
buildNodePackage {
  name = "verror";
  version = "1.6.0";
  src = pkgs.fetchurl {
    url = "http://registry.npmjs.org/verror/-/verror-1.6.0.tgz";
    sha1 = "7d13b27b1facc2e2da90405eb5ea6e5bdd252ea5";
  };
  # Runtime dependencies, referenced explicitly from nodePackages.
  deps = [
    nodePackages.extsprintf_1-2-0
  ];
  # No dev dependencies (kept explicit, as generated).
  devDependencies = [];
  meta = {
    description = "richer JavaScript errors";
    homepage = "https://github.com/davepacheco/node-verror";
  };
}

View File

@ -0,0 +1 @@
1.6.0.nix

View File

@ -0,0 +1,14 @@
{ buildNodePackage, nodePackages, pkgs }:

# npm package "wrappy" 1.0.1, fetched from the npm registry and
# pinned by sha1 for reproducibility.
buildNodePackage {
  name = "wrappy";
  version = "1.0.1";
  src = pkgs.fetchurl {
    url = "http://registry.npmjs.org/wrappy/-/wrappy-1.0.1.tgz";
    sha1 = "1e65969965ccbc2db4548c6b84a6f2c5aedd4739";
  };
  # No runtime dependencies.
  deps = [];
  meta = {
    description = "Callback wrapping utility";
    homepage = "https://github.com/npm/wrappy";
  };
}

View File

@ -0,0 +1 @@
1.0.1.nix

View File

@ -1,17 +1,17 @@
{ nixpkgs ? import <nixpkgs> {}, compiler ? "ghc7101" }: let
{ nixpkgs ? import <nixpkgs> {}, compiler ? "ghc7102" }: let
inherit (nixpkgs) pkgs;
ghc = pkgs.haskell.packages.${compiler}.ghcWithPackages( ps: with ps; [
hdevtools doctest
ghc = pkgs.haskell.packages.${compiler}.ghcWithPackages(ps: [
ps.hdevtools ps.doctest ps.hspec-discover ps.hlint ps.ghc-mod
]);
cabal-install = pkgs.haskell.packages.${compiler}.cabal-install;
pkg = (import ./default.nix { inherit nixpkgs compiler; });
pkg = import ./default.nix { inherit nixpkgs compiler; };
npm = import ./npm {};
in
pkgs.stdenv.mkDerivation rec {
name = pkg.pname;
buildInputs = [ ghc cabal-install ] ++ pkg.env.buildInputs;
buildInputs = [ ghc cabal-install npm.nodePackages.ldapjs ] ++ pkg.env.buildInputs;
shellHook = ''
${pkg.env.shellHook}
export IN_WHICH_NIX_SHELL=${name}
cabal configure --package-db=$NIX_GHC_LIBDIR/package.conf.d
cabal configure --enable-tests --package-db=$NIX_GHC_LIBDIR/package.conf.d
'';
}

View File

@ -16,7 +16,7 @@ spec = do
res `shouldBe` True
res `shouldBe` Right ()
it "compares and looses" $ do
it "compares and loses" $ do
res <- locally $ \l -> do
res <- Ldap.compare l charmander (Attr "type") "flying"
res `shouldBe` False

View File

@ -1,3 +1,4 @@
{-# LANGUAGE CPP #-}
{-# LANGUAGE OverloadedStrings #-}
module SpecHelper
( locally
@ -22,8 +23,15 @@ module SpecHelper
, oddish
) where
#if __GLASGOW_HASKELL__ < 710
import Control.Applicative ((<$))
#endif
import Control.Monad (forever)
import Control.Concurrent (forkIO)
import Control.Exception (bracket)
import System.Environment (getEnvironment)
import System.IO (hGetLine)
import System.IO.Error (tryIOError)
import System.Process (runInteractiveProcess, terminateProcess, waitForProcess)
import Ldap.Client as Ldap
@ -31,12 +39,14 @@ import Ldap.Client as Ldap
locally :: (Ldap -> IO a) -> IO (Either LdapError a)
locally f =
bracket (do (_, out, _, h) <- runInteractiveProcess "./test/ldap.js" [] Nothing
(Just [ ("PORT", show port)
, ("SSL_CERT", "./ssl/cert.pem")
, ("SSL_KEY", "./ssl/key.pem")
])
bracket (do env <- getEnvironment
(_, out, _, h) <- runInteractiveProcess "./test/ldap.js" [] Nothing
(Just (("PORT", show port) :
("SSL_CERT", "./ssl/cert.pem") :
("SSL_KEY", "./ssl/key.pem") :
env))
hGetLine out
forkIO (() <$ tryIOError (forever (hGetLine out >>= putStrLn)))
return h)
(\h -> do terminateProcess h
waitForProcess h)

View File

@ -1,8 +1,20 @@
#!/usr/bin/env js
#!/usr/bin/env node
var fs = require('fs');
var ldapjs = require('ldapjs');
// Stub unimplemented functionality.
ldapjs.ExtensibleFilter.prototype.matches = ldapjs.EqualityFilter.prototype.matches;
ldapjs.ApproximateFilter.prototype.matches = ldapjs.EqualityFilter.prototype.matches;
// Remove superfluous spaces from DNs.
var wrappee = ldapjs.DN.prototype.format;
ldapjs.DN.prototype.format = function(options) {
options = options || this._format;
options['skipSpace'] = true;
return (wrappee.bind(this))(options);
};
var port = process.env.PORT;
var certificate = fs.readFileSync(process.env.SSL_CERT, "utf-8");
var key = fs.readFileSync(process.env.SSL_KEY, "utf-8");
@ -81,8 +93,9 @@ function authorize(req, res, next) {
server.search('o=localhost', [authorize], function(req, res, next) {
for (var i = 0; i < pokemon.length; i++) {
if (req.filter.matches(pokemon[i].attributes))
if (req.filter.matches(pokemon[i].attributes)) {
res.send(pokemon[i]);
}
};
res.end();
@ -163,7 +176,7 @@ server.modifyDN('o=localhost', [], function(req, res, next) {
if (req.dn.toString() === pokemon[i].dn) {
req.dn.rdns[0] = req.newRdn.rdns[0];
pokemon[i].dn = req.dn.toString();
pokemon[i].attributes.cn = req.newRdn.rdns[0].cn;
pokemon[i].attributes.cn = req.newRdn.rdns[0].attrs.cn.value;
}
}