Compare commits: main...lowercase_
2 commits: ce39432a07, aac8ee19d7

14 changed files with 763 additions and 198 deletions

@@ -1,3 +1,7 @@
---
kind: pipeline
name: bottin

steps:
- name: build
  image: golang:stretch
@@ -8,8 +12,14 @@ steps:
  - go test -i -c -o test

- name: test_bottin
  image: consul:1.15.4
  image: consul:latest
  environment:
    BOTTIN_DEFAULT_ADMIN_PW: priZ4Cg0x5NkSyiIN/MpvWw4ZEy8f8s1
  commands:
  - ash test/runner.sh

---
kind: signature
hmac: ff246a04c3df8a2f39c8b446dea920622d61950e6caaac886931bdb05d0706ed

...

.gitignore (vendored) · 2 changes
@@ -2,5 +2,3 @@ bottin
bottin.static
config.json
test/test
result
ldap.json

Dockerfile (new file) · 5 changes
@@ -0,0 +1,5 @@
FROM scratch

ADD bottin.static /bottin

ENTRYPOINT ["/bottin"]

README.md
@@ -1,6 +1,6 @@
# Bottin

[![status-badge](https://woodpecker.deuxfleurs.fr/api/badges/38/status.svg)](https://woodpecker.deuxfleurs.fr/repos/38)
[![Build Status](https://drone.deuxfleurs.fr/api/badges/Deuxfleurs/bottin/status.svg?ref=refs/heads/main)](https://drone.deuxfleurs.fr/Deuxfleurs/bottin)

<img src="https://git.deuxfleurs.fr/Deuxfleurs/bottin/raw/branch/main/bottin.png" style="height: 150px; display: block; margin-left: auto; margin-right: auto" />

@@ -24,7 +24,7 @@ Features:
- Access control through an ACL (hardcoded in the configuration file)


A Docker image is provided on the [Docker hub](https://hub.docker.com/r/dxflrs/bottin) (built in `default.nix`).
A Docker image is provided on the [Docker hub](https://hub.docker.com/r/lxpz/bottin_amd64).
An example for running Bottin on a Nomad cluster can be found in `bottin.hcl.example`.

Bottin takes a single command line argument, `-config <filename>`, which is the
@@ -53,7 +53,7 @@ nix-build -A docker

```bash
docker load < $(nix-build -A docker)
docker push dxflrs/bottin:???
docker push ???
```

## Server initialization

default.nix · 15 changes
@@ -4,16 +4,10 @@ let
    url = "https://github.com/NixOS/nixpkgs/archive/d2db10786f27619d5519b12b03fb10dc8ca95e59.tar.gz";
    sha256 = "0s9gigs3ylnq5b94rfcmxvrmmr3kzhs497gksajf638d5bv7zcl5";
  };
  gomod2nix = fetchGit {
    url = "https://github.com/tweag/gomod2nix.git";
    ref = "master";
    rev = "40d32f82fc60d66402eb0972e6e368aeab3faf58";
  };

  pkgs = import pkgsSrc {
    overlays = [
      (self: super: {
        gomod = super.callPackage "${gomod2nix}/builder/" { };
        gomod = super.callPackage ./nix/builder { };
      })
    ];
  };
@@ -29,8 +23,8 @@ in rec {
    CGO_ENABLED=0;

    meta = with pkgs.lib; {
      description = "A cloud-native LDAP server backed by a Consul datastore";
      homepage = "https://git.deuxfleurs.fr/Deuxfleurs/bottin";
      description = "Interface web pour gérer le LDAP: changer son mot de passe, ses infos de profil, inviter des gens, administration";
      homepage = "https://git.deuxfleurs.fr/Deuxfleurs/guichet";
      license = licenses.gpl3Plus;
      platforms = platforms.linux;
    };
@@ -48,8 +42,7 @@ in rec {
  docker = pkgs.dockerTools.buildImage {
    name = "dxflrs/bottin";
    config = {
      Entrypoint = [ "${pkg}/bottin" ];
      WorkingDir = "/";
      Cmd = [ "${pkg}/bottin" ];
    };
  };
}

flake.lock · 79 changes (file deleted)
@@ -1,79 +0,0 @@
{
  "nodes": {
    "gomod2nix": {
      "inputs": {
        "nixpkgs": "nixpkgs",
        "utils": "utils"
      },
      "locked": {
        "lastModified": 1655245309,
        "narHash": "sha256-d/YPoQ/vFn1+GTmSdvbSBSTOai61FONxB4+Lt6w/IVI=",
        "owner": "tweag",
        "repo": "gomod2nix",
        "rev": "40d32f82fc60d66402eb0972e6e368aeab3faf58",
        "type": "github"
      },
      "original": {
        "owner": "tweag",
        "repo": "gomod2nix",
        "rev": "40d32f82fc60d66402eb0972e6e368aeab3faf58",
        "type": "github"
      }
    },
    "nixpkgs": {
      "locked": {
        "lastModified": 1653581809,
        "narHash": "sha256-Uvka0V5MTGbeOfWte25+tfRL3moECDh1VwokWSZUdoY=",
        "owner": "NixOS",
        "repo": "nixpkgs",
        "rev": "83658b28fe638a170a19b8933aa008b30640fbd1",
        "type": "github"
      },
      "original": {
        "owner": "NixOS",
        "ref": "nixos-unstable",
        "repo": "nixpkgs",
        "type": "github"
      }
    },
    "nixpkgs_2": {
      "locked": {
        "lastModified": 1669764884,
        "narHash": "sha256-1qWR/5+WtqxSedrFbUbM3zPMO7Ec2CGWaxtK4z4DdvY=",
        "owner": "nixos",
        "repo": "nixpkgs",
        "rev": "0244e143dc943bcf661fdaf581f01eb0f5000fcf",
        "type": "github"
      },
      "original": {
        "owner": "nixos",
        "repo": "nixpkgs",
        "rev": "0244e143dc943bcf661fdaf581f01eb0f5000fcf",
        "type": "github"
      }
    },
    "root": {
      "inputs": {
        "gomod2nix": "gomod2nix",
        "nixpkgs": "nixpkgs_2"
      }
    },
    "utils": {
      "locked": {
        "lastModified": 1653893745,
        "narHash": "sha256-0jntwV3Z8//YwuOjzhV2sgJJPt+HY6KhU7VZUL0fKZQ=",
        "owner": "numtide",
        "repo": "flake-utils",
        "rev": "1ed9fb1935d260de5fe1c2f7ee0ebaae17ed2fa1",
        "type": "github"
      },
      "original": {
        "owner": "numtide",
        "repo": "flake-utils",
        "type": "github"
      }
    }
  },
  "root": "root",
  "version": 7
}

flake.nix · 39 changes (file deleted)
@@ -1,39 +0,0 @@
{
  description = "A cloud-native LDAP server backed by a Consul datastore";

  inputs.nixpkgs.url = "github:nixos/nixpkgs/0244e143dc943bcf661fdaf581f01eb0f5000fcf";
  inputs.gomod2nix.url = "github:tweag/gomod2nix/40d32f82fc60d66402eb0972e6e368aeab3faf58";

  outputs = { self, nixpkgs, gomod2nix }:
    let
      pkgs = import nixpkgs {
        system = "x86_64-linux";
        overlays = [
          (self: super: {
            gomod = super.callPackage "${gomod2nix}/builder/" { };
          })
        ];
      };
      bottin = pkgs.gomod.buildGoApplication {
        pname = "bottin";
        version = "0.1.0";
        src = builtins.filterSource
          (path: type: (builtins.match ".*/test/.*\\.(go|sum|mod)" path) == null)
          ./.;
        modules = ./gomod2nix.toml;

        CGO_ENABLED=0;

        meta = with pkgs.lib; {
          description = "A cloud-native LDAP server backed by a Consul datastore";
          homepage = "https://git.deuxfleurs.fr/Deuxfleurs/bottin";
          license = licenses.gpl3Plus;
          platforms = platforms.linux;
        };
      };
    in
    {
      packages.x86_64-linux.bottin = bottin;
      packages.x86_64-linux.default = self.packages.x86_64-linux.bottin;
    };
}

nix/builder/default.nix (new file) · 387 changes
@@ -0,0 +1,387 @@
{ stdenv
, stdenvNoCC
, runCommand
, buildEnv
, lib
, fetchgit
, removeReferencesTo
, jq
, cacert
, pkgs
, pkgsBuildBuild
}:
let

  inherit (builtins) substring toJSON hasAttr trace split readFile elemAt;
  inherit (lib)
    concatStringsSep replaceStrings removePrefix optionalString pathExists
    optional concatMapStrings fetchers filterAttrs mapAttrs mapAttrsToList
    warnIf optionalAttrs platforms
    ;

  parseGoMod = import ./parser.nix;

  removeExpr = refs: ''remove-references-to ${concatMapStrings (ref: " -t ${ref}") refs}'';

  # Internal only build-time attributes
  internal =
    let
      mkInternalPkg = name: src: pkgsBuildBuild.runCommand "gomod2nix-${name}"
        {
          inherit (pkgsBuildBuild.go) GOOS GOARCH;
          nativeBuildInputs = [ pkgsBuildBuild.go ];
        } ''
        export HOME=$(mktemp -d)
        cp ${src} src.go
        go build -o $out src.go
      '';
    in
    {

      # Create a symlink tree of vendored sources
      symlink = mkInternalPkg "symlink" ./symlink/symlink.go;

      # Install development dependencies from tools.go
      install = mkInternalPkg "symlink" ./install/install.go;

    };

  fetchGoModule =
    { hash
    , goPackagePath
    , version
    , go ? pkgs.go
    }:
    stdenvNoCC.mkDerivation {
      name = "${baseNameOf goPackagePath}_${version}";
      builder = ./fetch.sh;
      inherit goPackagePath version;
      nativeBuildInputs = [ go jq ];
      outputHashMode = "recursive";
      outputHashAlgo = null;
      outputHash = hash;
      SSL_CERT_FILE = "${cacert}/etc/ssl/certs/ca-bundle.crt";
      impureEnvVars = fetchers.proxyImpureEnvVars ++ [ "GOPROXY" ];
    };

  mkVendorEnv =
    { go
    , modulesStruct
    , localReplaceCommands ? [ ]
    , defaultPackage ? ""
    , goMod
    , pwd
    }:
    let
      localReplaceCommands =
        let
          localReplaceAttrs = filterAttrs (n: v: hasAttr "path" v) goMod.replace;
          commands = (
            mapAttrsToList
              (name: value: (
                ''
                  mkdir -p $(dirname vendor/${name})
                  ln -s ${pwd + "/${value.path}"} vendor/${name}
                ''
              ))
              localReplaceAttrs);
        in
        if goMod != null then commands else [ ];

      sources = mapAttrs
        (goPackagePath: meta: fetchGoModule {
          goPackagePath = meta.replaced or goPackagePath;
          inherit (meta) version hash;
          inherit go;
        })
        modulesStruct.mod;
    in
    runCommand "vendor-env"
      {
        nativeBuildInputs = [ go ];
        json = toJSON (filterAttrs (n: _: n != defaultPackage) modulesStruct.mod);

        sources = toJSON (filterAttrs (n: _: n != defaultPackage) sources);

        passthru = {
          inherit sources;
        };

        passAsFile = [ "json" "sources" ];
      }
      (
        ''
          mkdir vendor

          export GOCACHE=$TMPDIR/go-cache
          export GOPATH="$TMPDIR/go"

          ${internal.symlink}
          ${concatStringsSep "\n" localReplaceCommands}

          mv vendor $out
        ''
      );

  # Select Go attribute based on version specified in go.mod
  selectGo = attrs: goMod: attrs.go or (if goMod == null then pkgs.go else
    (
      let
        goVersion = goMod.go;
        goAttr = "go_" + (replaceStrings [ "." ] [ "_" ] goVersion);
      in
      (
        if hasAttr goAttr pkgs then pkgs.${goAttr}
        else trace "go.mod specified Go version ${goVersion} but doesn't exist. Falling back to ${pkgs.go.version}." pkgs.go
      )
    ));

  # Strip the rubbish that Go adds to versions, and fall back to a version based on the date if it's a placeholder value
  stripVersion = version:
    let
      parts = elemAt (split "(\\+|-)" (removePrefix "v" version));
      v = parts 0;
      d = parts 2;
    in
    if v != "0.0.0" then v else "unstable-" + (concatStringsSep "-" [
      (substring 0 4 d)
      (substring 4 2 d)
      (substring 6 2 d)
    ]);

  mkGoEnv =
    { pwd
    }@attrs:
    let
      goMod = parseGoMod (readFile "${toString pwd}/go.mod");
      modulesStruct = fromTOML (readFile "${toString pwd}/gomod2nix.toml");

      go = selectGo attrs goMod;

      vendorEnv = mkVendorEnv {
        inherit go modulesStruct pwd goMod;
      };

    in
    stdenv.mkDerivation (removeAttrs attrs [ "pwd" ] // {
      name = "${baseNameOf goMod.module}-env";

      dontUnpack = true;
      dontConfigure = true;
      dontInstall = true;

      propagatedNativeBuildInputs = [ go ];

      GO_NO_VENDOR_CHECKS = "1";

      GO111MODULE = "on";
      GOFLAGS = "-mod=vendor";

      preferLocalBuild = true;

      buildPhase = ''
        mkdir $out

        export GOCACHE=$TMPDIR/go-cache
        export GOPATH="$out"
        export GOSUMDB=off
        export GOPROXY=off

      '' + optionalString (pathExists (pwd + "/tools.go")) ''
        mkdir source
        cp ${pwd + "/go.mod"} source/go.mod
        cp ${pwd + "/go.sum"} source/go.sum
        cp ${pwd + "/tools.go"} source/tools.go
        cd source
        ln -s ${vendorEnv} vendor

        ${internal.install}
      '';
    });

  buildGoApplication =
    { modules ? pwd + "/gomod2nix.toml"
    , src ? pwd
    , pwd ? null
    , nativeBuildInputs ? [ ]
    , allowGoReference ? false
    , meta ? { }
    , passthru ? { }
    , tags ? [ ]

      # needed for buildFlags{,Array} warning
    , buildFlags ? ""
    , buildFlagsArray ? ""

    , ...
    }@attrs:
    let
      modulesStruct = fromTOML (readFile modules);

      goModPath = "${toString pwd}/go.mod";

      goMod =
        if pwd != null && pathExists goModPath
        then parseGoMod (readFile goModPath)
        else null;

      go = selectGo attrs goMod;

      removeReferences = [ ] ++ optional (!allowGoReference) go;

      defaultPackage = modulesStruct.goPackagePath or "";

      vendorEnv = mkVendorEnv {
        inherit go modulesStruct defaultPackage goMod pwd;
      };

    in
    warnIf (buildFlags != "" || buildFlagsArray != "")
      "Use the `ldflags` and/or `tags` attributes instead of `buildFlags`/`buildFlagsArray`"
      stdenv.mkDerivation
      (optionalAttrs (defaultPackage != "")
        {
          pname = attrs.pname or baseNameOf defaultPackage;
          version = stripVersion (modulesStruct.mod.${defaultPackage}).version;
          src = vendorEnv.passthru.sources.${defaultPackage};
        } // optionalAttrs (hasAttr "subPackages" modulesStruct) {
        subPackages = modulesStruct.subPackages;
      } // attrs // {
        nativeBuildInputs = [ removeReferencesTo go ] ++ nativeBuildInputs;

        inherit (go) GOOS GOARCH;

        GO_NO_VENDOR_CHECKS = "1";

        GO111MODULE = "on";
        GOFLAGS = "-mod=vendor";

        configurePhase = attrs.configurePhase or ''
          runHook preConfigure

          export GOCACHE=$TMPDIR/go-cache
          export GOPATH="$TMPDIR/go"
          export GOSUMDB=off
          export GOPROXY=off
          cd "$modRoot"
          if [ -n "${vendorEnv}" ]; then
            rm -rf vendor
            ln -s ${vendorEnv} vendor
          fi

          runHook postConfigure
        '';

        buildPhase = attrs.buildPhase or ''
          runHook preBuild

          exclude='\(/_\|examples\|Godeps\|testdata'
          if [[ -n "$excludedPackages" ]]; then
            IFS=' ' read -r -a excludedArr <<<$excludedPackages
            printf -v excludedAlternates '%s\\|' "''${excludedArr[@]}"
            excludedAlternates=''${excludedAlternates%\\|} # drop final \| added by printf
            exclude+='\|'"$excludedAlternates"
          fi
          exclude+='\)'

          buildGoDir() {
            local d; local cmd;
            cmd="$1"
            d="$2"
            . $TMPDIR/buildFlagsArray
            local OUT
            if ! OUT="$(go $cmd $buildFlags "''${buildFlagsArray[@]}" ''${tags:+-tags=${concatStringsSep "," tags}} ''${ldflags:+-ldflags="$ldflags"} -v -p $NIX_BUILD_CORES $d 2>&1)"; then
              if echo "$OUT" | grep -qE 'imports .*?: no Go files in'; then
                echo "$OUT" >&2
                return 1
              fi
              if ! echo "$OUT" | grep -qE '(no( buildable| non-test)?|build constraints exclude all) Go (source )?files'; then
                echo "$OUT" >&2
                return 1
              fi
            fi
            if [ -n "$OUT" ]; then
              echo "$OUT" >&2
            fi
            return 0
          }

          getGoDirs() {
            local type;
            type="$1"
            if [ -n "$subPackages" ]; then
              echo "$subPackages" | sed "s,\(^\| \),\1./,g"
            else
              find . -type f -name \*$type.go -exec dirname {} \; | grep -v "/vendor/" | sort --unique | grep -v "$exclude"
            fi
          }

          if (( "''${NIX_DEBUG:-0}" >= 1 )); then
            buildFlagsArray+=(-x)
          fi

          if [ ''${#buildFlagsArray[@]} -ne 0 ]; then
            declare -p buildFlagsArray > $TMPDIR/buildFlagsArray
          else
            touch $TMPDIR/buildFlagsArray
          fi
          if [ -z "$enableParallelBuilding" ]; then
            export NIX_BUILD_CORES=1
          fi
          for pkg in $(getGoDirs ""); do
            echo "Building subPackage $pkg"
            buildGoDir install "$pkg"
          done
        '' + optionalString (stdenv.hostPlatform != stdenv.buildPlatform) ''
          # normalize cross-compiled builds w.r.t. native builds
          (
            dir=$GOPATH/bin/${go.GOOS}_${go.GOARCH}
            if [[ -n "$(shopt -s nullglob; echo $dir/*)" ]]; then
              mv $dir/* $dir/..
            fi
            if [[ -d $dir ]]; then
              rmdir $dir
            fi
          )
        '' + ''
          runHook postBuild
        '';

        doCheck = attrs.doCheck or true;
        checkPhase = attrs.checkPhase or ''
          runHook preCheck

          for pkg in $(getGoDirs test); do
            buildGoDir test $checkFlags "$pkg"
          done

          runHook postCheck
        '';

        installPhase = attrs.installPhase or ''
          runHook preInstall

          mkdir -p $out
          dir="$GOPATH/bin"
          [ -e "$dir" ] && cp -r $dir $out

          runHook postInstall
        '';

        preFixup = (attrs.preFixup or "") + ''
          find $out/{bin,libexec,lib} -type f 2>/dev/null | xargs -r ${removeExpr removeReferences} || true
        '';

        strictDeps = true;

        disallowedReferences = optional (!allowGoReference) go;

        passthru = { inherit go vendorEnv; } // passthru;

        meta = { platforms = go.meta.platforms or platforms.all; } // meta;
      });

in
{
  inherit buildGoApplication mkGoEnv;
}

nix/builder/fetch.sh (new file) · 13 changes
@@ -0,0 +1,13 @@
source $stdenv/setup

export HOME=$(mktemp -d)

# Call once first outside of subshell for better error reporting
go mod download "$goPackagePath@$version"

dir=$(go mod download --json "$goPackagePath@$version" | jq -r .Dir)

chmod -R +w $dir
find $dir -iname ".ds_store" | xargs -r rm -rf

cp -r $dir $out
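
fetch.sh asks the Go tool where it cached the requested module and copies that directory to `$out`. A rough Go rendering of the same `go mod download --json | jq -r .Dir` step, only to show which field is consumed; the module and version below are placeholders, not dependencies of bottin:

```go
package main

import (
	"encoding/json"
	"fmt"
	"os/exec"
)

func main() {
	// Placeholder module; fetch.sh receives $goPackagePath and $version from
	// the fetchGoModule derivation instead.
	out, err := exec.Command("go", "mod", "download", "--json", "golang.org/x/text@v0.3.7").Output()
	if err != nil {
		panic(err)
	}
	// `go mod download --json` prints more fields (Path, Version, Sum, ...);
	// fetch.sh only keeps Dir, the module-cache directory it copies to $out.
	var result struct {
		Dir string `json:"Dir"`
	}
	if err := json.Unmarshal(out, &result); err != nil {
		panic(err)
	}
	fmt.Println(result.Dir)
}
```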

nix/builder/install/install.go (new file) · 57 changes
@@ -0,0 +1,57 @@
package main

import (
	"fmt"
	"go/parser"
	"go/token"
	"io"
	"os"
	"os/exec"
	"strconv"
)

const filename = "tools.go"

func main() {
	fset := token.NewFileSet()

	var src []byte
	{
		f, err := os.Open(filename)
		if err != nil {
			panic(err)
		}

		src, err = io.ReadAll(f)
		if err != nil {
			panic(err)
		}
	}

	f, err := parser.ParseFile(fset, filename, src, parser.ImportsOnly)
	if err != nil {
		fmt.Println(err)
		return
	}

	for _, s := range f.Imports {
		path, err := strconv.Unquote(s.Path.Value)
		if err != nil {
			panic(err)
		}

		cmd := exec.Command("go", "install", path)

		fmt.Printf("Executing '%s'\n", cmd)

		err = cmd.Start()
		if err != nil {
			panic(err)
		}

		err = cmd.Wait()
		if err != nil {
			panic(err)
		}
	}
}
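
install.go parses a file literally named `tools.go` in the working directory (imports only) and runs `go install` on each imported path. A hypothetical `tools.go` in the conventional blank-import style shows the input it expects; the imported tool is only an example, not something bottin necessarily uses:

```go
//go:build tools
// +build tools

// Hypothetical tools.go as consumed by install.go: each blank import pins a
// development tool in go.mod so `go install <path>` can resolve it from the
// vendored module set.
package tools

import (
	_ "golang.org/x/tools/cmd/stringer" // example tool
)
```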

nix/builder/parser.nix (new file) · 141 changes
@@ -0,0 +1,141 @@
# Parse go.mod in Nix
# Returns a Nix structure with the contents of the go.mod passed in
# in normalised form.

let
  inherit (builtins) elemAt mapAttrs split foldl' match filter typeOf hasAttr length;

  # Strip lines with comments & other junk
  stripStr = s: elemAt (split "^ *" (elemAt (split " *$" s) 0)) 2;
  stripLines = initialLines: foldl' (acc: f: f acc) initialLines [
    # Strip comments
    (lines: map
      (l: stripStr (elemAt (splitString "//" l) 0))
      lines)

    # Strip leading tabs characters
    (lines: map (l: elemAt (match "(\t)?(.*)" l) 1) lines)

    # Filter empty lines
    (filter (l: l != ""))
  ];

  # Parse lines into a structure
  parseLines = lines: (foldl'
    (acc: l:
      let
        m = match "([^ )]*) *(.*)" l;
        directive = elemAt m 0;
        rest = elemAt m 1;

        # Maintain parser state (inside parens or not)
        inDirective =
          if rest == "(" then directive
          else if rest == ")" then null
          else acc.inDirective
        ;

      in
      {
        data = (acc.data // (
          if directive == "" && rest == ")" then { }
          else if inDirective != null && rest == "(" && ! hasAttr inDirective acc.data then {
            ${inDirective} = { };
          }
          else if rest == "(" || rest == ")" then { }
          else if inDirective != null then {
            ${inDirective} = acc.data.${inDirective} // { ${directive} = rest; };
          } else if directive == "replace" then
            (
              let
                segments = split " => " rest;
                getSegment = elemAt segments;
              in
              assert length segments == 3; {
                replace = acc.data.replace // {
                  ${getSegment 0} = "=> ${getSegment 2}";
                };
              }
            )
          else {
            ${directive} = rest;
          }
        )
        );
        inherit inDirective;
      })
    {
      inDirective = null;
      data = {
        require = { };
        replace = { };
        exclude = { };
      };
    }
    lines
  ).data;

  normaliseDirectives = data: (
    let
      normaliseString = s:
        let
          m = builtins.match "([^ ]+) (.+)" s;
        in
        {
          ${elemAt m 0} = elemAt m 1;
        };
      require = data.require or { };
      replace = data.replace or { };
      exclude = data.exclude or { };
    in
    data // {
      require =
        if typeOf require == "string" then normaliseString require
        else require;
      replace =
        if typeOf replace == "string" then normaliseString replace
        else replace;
    }
  );

  parseVersion = ver:
    let
      m = elemAt (match "([^-]+)-?([^-]*)-?([^-]*)" ver);
      v = elemAt (match "([^+]+)\\+?(.*)" (m 0));
    in
    {
      version = v 0;
      versionSuffix = v 1;
      date = m 1;
      rev = m 2;
    };

  parseReplace = data: (
    data // {
      replace =
        mapAttrs
          (_: v:
            let
              m = match "=> ([^ ]+) (.+)" v;
              m2 = match "=> (.*+)" v;
            in
            if m != null then {
              goPackagePath = elemAt m 0;
              version = elemAt m 1;
            } else {
              path = elemAt m2 0;
            })
          data.replace;
    }
  );

  splitString = sep: s: filter (t: t != [ ]) (split sep s);

in
contents:
foldl' (acc: f: f acc) (splitString "\n" contents) [
  stripLines
  parseLines
  normaliseDirectives
  parseReplace
]

nix/builder/symlink/symlink.go (new file) · 110 changes
@@ -0,0 +1,110 @@
package main

import (
	"encoding/json"
	"fmt"
	"io/ioutil"
	"os"
	"path/filepath"
	"sort"
)

type Package struct {
	GoPackagePath string `json:"-"`
	Version       string `json:"version"`
	Hash          string `json:"hash"`
	ReplacedPath  string `json:"replaced,omitempty"`
}

// type Output struct {
// 	SchemaVersion int                 `json:"schema"`
// 	Mod           map[string]*Package `json:"mod"`
// }

func main() {

	// var output Output
	sources := make(map[string]string)
	pkgs := make(map[string]*Package)

	{
		b, err := ioutil.ReadFile(os.Getenv("sourcesPath"))
		if err != nil {
			panic(err)
		}

		err = json.Unmarshal(b, &sources)
		if err != nil {
			panic(err)
		}
	}

	{
		b, err := ioutil.ReadFile(os.Getenv("jsonPath"))
		if err != nil {
			panic(err)
		}

		err = json.Unmarshal(b, &pkgs)
		if err != nil {
			panic(err)
		}
	}

	keys := make([]string, 0, len(pkgs))
	for key := range pkgs {
		keys = append(keys, key)
	}
	sort.Strings(keys)

	// Iterate, in reverse order
	for i := len(keys) - 1; i >= 0; i-- {
		key := keys[i]
		src := sources[key]

		paths := []string{key}

		for _, path := range paths {

			vendorDir := filepath.Join("vendor", filepath.Dir(path))
			if err := os.MkdirAll(vendorDir, 0755); err != nil {
				panic(err)
			}

			if _, err := os.Stat(filepath.Join("vendor", path)); err == nil {
				files, err := ioutil.ReadDir(src)
				if err != nil {
					panic(err)
				}

				for _, f := range files {
					innerSrc := filepath.Join(src, f.Name())
					dst := filepath.Join("vendor", path, f.Name())
					if err := os.Symlink(innerSrc, dst); err != nil {
						// assume it's an existing directory, try to link the directory content instead.
						// TODO should we do this recursively
						files, err := ioutil.ReadDir(innerSrc)
						if err != nil {
							panic(err)
						}
						for _, f := range files {
							if err := os.Symlink(filepath.Join(innerSrc, f.Name()), filepath.Join(dst, f.Name())); err != nil {
								fmt.Println("ignore symlink error", filepath.Join(innerSrc, f.Name()), filepath.Join(dst, f.Name()))
							}
						}
					}
				}

				continue
			}

			// If the file doesn't already exist, just create a simple symlink
			err := os.Symlink(src, filepath.Join("vendor", path))
			if err != nil {
				panic(err)
			}

		}
	}

}
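
symlink.go receives two JSON documents via the `sourcesPath` and `jsonPath` environment variables (written by `passAsFile` in nix/builder/default.nix) and links each module into `vendor/`. A small, self-contained sketch of the expected shapes, with made-up module data:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Mirrors the JSON-visible fields of the Package struct in symlink.go.
type pkgMeta struct {
	Version      string `json:"version"`
	Hash         string `json:"hash"`
	ReplacedPath string `json:"replaced,omitempty"`
}

func main() {
	// jsonPath: module path -> metadata (the "mod" table of gomod2nix.toml).
	pkgsJSON := []byte(`{"github.com/sirupsen/logrus": {"version": "v1.8.1", "hash": "sha256-EXAMPLEONLY"}}`)
	// sourcesPath: module path -> Nix store path of the fetched module source.
	sourcesJSON := []byte(`{"github.com/sirupsen/logrus": "/nix/store/xxxxxxxx-logrus_v1.8.1"}`)

	pkgs := map[string]*pkgMeta{}
	sources := map[string]string{}
	if err := json.Unmarshal(pkgsJSON, &pkgs); err != nil {
		panic(err)
	}
	if err := json.Unmarshal(sourcesJSON, &sources); err != nil {
		panic(err)
	}
	for path, p := range pkgs {
		// symlink.go would create vendor/<path> pointing at the store path.
		fmt.Printf("vendor/%s -> %s (version %s)\n", path, sources[path], p.Version)
	}
}
```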

ssha.go · 78 changes
@@ -2,16 +2,10 @@ package main

import (
	"errors"
	"regexp"
	"strings"

	"bytes"
	"crypto/rand"
	"crypto/sha1"
	"encoding/base64"
	"fmt"
	log "github.com/sirupsen/logrus"

	//"github.com/jsimonetti/pwscheme/ssha"
	"github.com/jsimonetti/pwscheme/ssha"
	"github.com/jsimonetti/pwscheme/ssha256"
	"github.com/jsimonetti/pwscheme/ssha512"
)
@@ -22,6 +16,8 @@ const (
	SSHA512 = "{SSHA512}"
)

var scheme = regexp.MustCompile(`^{([0-9a-zA-Z]+)}`)

// Encode encodes the string to ssha512
func SSHAEncode(rawPassPhrase string) (string, error) {
	return ssha512.Generate(rawPassPhrase, 16)
@@ -29,17 +25,16 @@ func SSHAEncode(rawPassPhrase string) (string, error) {

// Matches matches the encoded password and the raw password
func SSHAMatches(encodedPassPhrase string, rawPassPhrase string) (bool, error) {
	hashType, err := determineHashType(encodedPassPhrase)
	cleanEncodedPass := uppercaseScheme(encodedPassPhrase)

	hashType, err := determineHashType(cleanEncodedPass)
	if err != nil {
		return false, errors.New("invalid password hash stored")
	}

	var is_ok bool
	switch hashType {
	case SSHA:
		is_ok = LegacySSHAMatches(encodedPassPhrase, []byte(rawPassPhrase))
		return is_ok, nil
		//return ssha.Validate(rawPassPhrase, encodedPassPhrase)
		return ssha.Validate(rawPassPhrase, encodedPassPhrase)
	case SSHA256:
		return ssha256.Validate(rawPassPhrase, encodedPassPhrase)
	case SSHA512:
@@ -50,64 +45,21 @@ func SSHAMatches(encodedPassPhrase string, rawPassPhrase string) (bool, error) {
}

func determineHashType(hash string) (string, error) {
	if len(hash) >= 7 && strings.ToUpper(string(hash[0:6])) == SSHA {
	if len(hash) >= 7 && string(hash[0:6]) == SSHA {
		return SSHA, nil
	}
	if len(hash) >= 10 && strings.ToUpper(string(hash[0:9])) == SSHA256 {
	if len(hash) >= 10 && string(hash[0:9]) == SSHA256 {
		return SSHA256, nil
	}
	if len(hash) >= 10 && strings.ToUpper(string(hash[0:9])) == SSHA512 {
	if len(hash) >= 10 && string(hash[0:9]) == SSHA512 {
		return SSHA512, nil
	}

	return "", errors.New("no valid hash found")
}

// --- legacy

// Encode encodes the []byte of raw password
func LegacySSHAEncode(rawPassPhrase []byte) string {
	hash := legacyMakeSSHAHash(rawPassPhrase, legacyMakeSalt())
	b64 := base64.StdEncoding.EncodeToString(hash)
	return fmt.Sprintf("{ssha}%s", b64)
}

// Matches matches the encoded password and the raw password
func LegacySSHAMatches(encodedPassPhrase string, rawPassPhrase []byte) bool {
	if !strings.EqualFold(encodedPassPhrase[:6], "{ssha}") {
		return false
	}

	bhash, err := base64.StdEncoding.DecodeString(encodedPassPhrase[6:])
	if err != nil {
		return false
	}
	salt := bhash[20:]

	newssha := legacyMakeSSHAHash(rawPassPhrase, salt)

	if bytes.Compare(newssha, bhash) != 0 {
		return false
	}
	return true
}

// makeSalt make a 32 byte array containing random bytes.
func legacyMakeSalt() []byte {
	sbytes := make([]byte, 32)
	_, err := rand.Read(sbytes)
	if err != nil {
		log.Panicf("Could not read random bytes: %s", err)
	}
	return sbytes
}

// makeSSHAHash make hasing using SHA-1 with salt. This is not the final output though. You need to append {SSHA} string with base64 of this hash.
func legacyMakeSSHAHash(passphrase, salt []byte) []byte {
	sha := sha1.New()
	sha.Write(passphrase)
	sha.Write(salt)

	h := sha.Sum(nil)
	return append(h, salt...)
func uppercaseScheme(hash string) string {
	return scheme.ReplaceAllStringFunc(hash, func(w string) string {
		return strings.ToUpper(w)
	})
}
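
The change to ssha.go normalises a lowercase scheme prefix such as `{ssha512}` to upper case before `determineHashType` compares it against the `{SSHA*}` constants. A standalone sketch of the same regexp technique, with different names and placeholder hash payloads:

```go
package main

import (
	"fmt"
	"regexp"
	"strings"
)

// Same pattern as the `scheme` variable added in ssha.go: an alphanumeric
// scheme name wrapped in braces at the start of the string.
var schemePrefix = regexp.MustCompile(`^{([0-9a-zA-Z]+)}`)

// Upper-case only the leading "{scheme}" token; the base64 payload is untouched.
func normaliseScheme(hash string) string {
	return schemePrefix.ReplaceAllStringFunc(hash, strings.ToUpper)
}

func main() {
	fmt.Println(normaliseScheme("{ssha512}c2FsdGVkaGFzaA=="))  // {SSHA512}c2FsdGVkaGFzaA==
	fmt.Println(normaliseScheme("{SSHA256}b3RoZXJoYXNo"))       // already upper case, unchanged
}
```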

ssha_test.go (new file) · 17 changes
@@ -0,0 +1,17 @@
package main

import (
	"testing"
)

func TestUpper(t *testing.T) {
	r1 := uppercaseScheme("{ssha}ss4654654")
	if r1 != "{SSHA}ss4654654" {
		t.Fatal(r1)
	}

	r2 := uppercaseScheme("{ssha512}fdbgf6546")
	if r2 != "{SSHA512}fdbgf6546" {
		t.Fatal(r2)
	}
}