From 9d2b6ee10ec5359cc91769d430485c8c869ba1a8 Mon Sep 17 00:00:00 2001 From: Akshay Date: Thu, 24 Dec 2020 10:51:40 +0530 Subject: monorepo --- frontend/.direnv/flake-profile | 1 + frontend/.direnv/flake-profile.rc | 1473 ++++++++++++++++++++++++++++++++ frontend/elm-stuff/0.19.1/Cart.elmi | Bin 0 -> 4274 bytes frontend/elm-stuff/0.19.1/Cart.elmo | Bin 0 -> 6973 bytes frontend/elm-stuff/0.19.1/Catalog.elmi | Bin 0 -> 4112 bytes frontend/elm-stuff/0.19.1/Catalog.elmo | Bin 0 -> 5002 bytes frontend/elm-stuff/0.19.1/Login.elmi | Bin 0 -> 2307 bytes frontend/elm-stuff/0.19.1/Login.elmo | Bin 0 -> 5241 bytes frontend/elm-stuff/0.19.1/Main.elmi | Bin 0 -> 19391 bytes frontend/elm-stuff/0.19.1/Main.elmo | Bin 0 -> 14316 bytes frontend/elm-stuff/0.19.1/Product.elmi | Bin 0 -> 11250 bytes frontend/elm-stuff/0.19.1/Product.elmo | Bin 0 -> 15801 bytes frontend/elm-stuff/0.19.1/Signup.elmi | Bin 0 -> 3892 bytes frontend/elm-stuff/0.19.1/Signup.elmo | Bin 0 -> 8736 bytes frontend/elm-stuff/0.19.1/d.dat | Bin 0 -> 2696 bytes frontend/elm-stuff/0.19.1/i.dat | Bin 0 -> 122650 bytes frontend/elm-stuff/0.19.1/lock | 0 frontend/elm-stuff/0.19.1/o.dat | Bin 0 -> 482300 bytes frontend/elm.json | 27 + frontend/src/Cart.elm | 164 ++++ frontend/src/Catalog.elm | 125 +++ frontend/src/Login.elm | 119 +++ frontend/src/Main.elm | 339 ++++++++ frontend/src/Product.elm | 302 +++++++ frontend/src/Signup.elm | 194 +++++ 25 files changed, 2744 insertions(+) create mode 120000 frontend/.direnv/flake-profile create mode 100644 frontend/.direnv/flake-profile.rc create mode 100644 frontend/elm-stuff/0.19.1/Cart.elmi create mode 100644 frontend/elm-stuff/0.19.1/Cart.elmo create mode 100644 frontend/elm-stuff/0.19.1/Catalog.elmi create mode 100644 frontend/elm-stuff/0.19.1/Catalog.elmo create mode 100644 frontend/elm-stuff/0.19.1/Login.elmi create mode 100644 frontend/elm-stuff/0.19.1/Login.elmo create mode 100644 frontend/elm-stuff/0.19.1/Main.elmi create mode 100644 frontend/elm-stuff/0.19.1/Main.elmo create mode 100644 frontend/elm-stuff/0.19.1/Product.elmi create mode 100644 frontend/elm-stuff/0.19.1/Product.elmo create mode 100644 frontend/elm-stuff/0.19.1/Signup.elmi create mode 100644 frontend/elm-stuff/0.19.1/Signup.elmo create mode 100644 frontend/elm-stuff/0.19.1/d.dat create mode 100644 frontend/elm-stuff/0.19.1/i.dat create mode 100644 frontend/elm-stuff/0.19.1/lock create mode 100644 frontend/elm-stuff/0.19.1/o.dat create mode 100644 frontend/elm.json create mode 100644 frontend/src/Cart.elm create mode 100644 frontend/src/Catalog.elm create mode 100644 frontend/src/Login.elm create mode 100644 frontend/src/Main.elm create mode 100644 frontend/src/Product.elm create mode 100644 frontend/src/Signup.elm (limited to 'frontend') diff --git a/frontend/.direnv/flake-profile b/frontend/.direnv/flake-profile new file mode 120000 index 0000000..6e8a8e9 --- /dev/null +++ b/frontend/.direnv/flake-profile @@ -0,0 +1 @@ +/nix/store/m4z2i7h8x7xwac09q8gw4km9vhmi88ka-furby-env \ No newline at end of file diff --git a/frontend/.direnv/flake-profile.rc b/frontend/.direnv/flake-profile.rc new file mode 100644 index 0000000..dff177f --- /dev/null +++ b/frontend/.direnv/flake-profile.rc @@ -0,0 +1,1473 @@ +unset shellHook +nix_saved_PATH="$PATH" +AR=ar +export AR +AS=as +export AS +BASH=/nix/store/hrpvwkjz04s9i4nmli843hyw9z4pwhww-bash-4.4-p23/bin/bash +CC=gcc +export CC +CONFIG_SHELL=/nix/store/hrpvwkjz04s9i4nmli843hyw9z4pwhww-bash-4.4-p23/bin/bash +export CONFIG_SHELL +CXX=g++ +export CXX +DIRSTACK=() +GROUPS=() +HOSTTYPE=x86_64 
+HOST_PATH=/nix/store/603kisq28l3prqr92z5hffh7fmwwsc9f-elm-0.19.1/bin:/nix/store/amj35xgl7bhifnih0mh39j8kbvsvg2xn-node__at_elm-tooling_slash_elm-language-server-1.6.3/bin:/nix/store/hzxpv4ha44w9pg8ynkfsgjhi3kb13h27-elm-format-0.8.3/bin:/nix/store/w3sh9aki5wrchcrq2wlg2xb394f8brh4-node_elm-oracle-1.1.1/bin:/nix/store/3yl8y2fkc1wsyqm008is2pdw5dja2icy-node_elm-test-0.19.1-revision2/bin:/nix/store/x0jla3hpxrwz76hy9yckg1iyc9hns81k-coreutils-8.31/bin:/nix/store/97vambzyvpvrd9wgrrw7i7svi0s8vny5-findutils-4.7.0/bin:/nix/store/dqq1bvpi3g0h4v05111b3i0ymqj4v5x1-diffutils-3.7/bin:/nix/store/p34p7ysy84579lndk7rbrz6zsfr03y71-gnused-4.8/bin:/nix/store/b0vjq4r4sp9z4l2gbkc5dyyw5qfgyi3r-gnugrep-3.4/bin:/nix/store/c8balm59sxfkw9ik1fqbkadsvjqhmbx4-gawk-5.0.1/bin:/nix/store/g7dr83wnkx4gxa5ykcljc5jg04416z60-gnutar-1.32/bin:/nix/store/kkvgr3avpp7yd5hzmc4syh43jqj03sgb-gzip-1.10/bin:/nix/store/rw96psqzgyqrcd12qr6ivk9yiskjm3ab-bzip2-1.0.6.0.1-bin/bin:/nix/store/dp6y0n9cba79wwc54n1brg7xbjsq5hka-gnumake-4.2.1/bin:/nix/store/hrpvwkjz04s9i4nmli843hyw9z4pwhww-bash-4.4-p23/bin:/nix/store/xac1zfclx1xxgcd84vqb6hy3apl171n8-patch-2.7.6/bin:/nix/store/mm0w8jc58rn01c4kz2n9jvwd6bibcihs-xz-5.2.4-bin/bin +export HOST_PATH +IFS=$' \t\n' +IN_NIX_SHELL=impure +export IN_NIX_SHELL +LD=ld +export LD +MACHTYPE=x86_64-unknown-linux-gnu +NIX_BINTOOLS=/nix/store/n48b8n251dwwb04q7f3fwxdmirsakllz-binutils-wrapper-2.31.1 +export NIX_BINTOOLS +NIX_BINTOOLS_WRAPPER_x86_64_unknown_linux_gnu_TARGET_HOST=1 +export NIX_BINTOOLS_WRAPPER_x86_64_unknown_linux_gnu_TARGET_HOST +NIX_BUILD_CORES=8 +export NIX_BUILD_CORES +NIX_CC=/nix/store/m6h7zh8w6s52clnyskffj5lbkakqgywn-gcc-wrapper-9.2.0 +export NIX_CC +NIX_CC_WRAPPER_x86_64_unknown_linux_gnu_TARGET_HOST=1 +export NIX_CC_WRAPPER_x86_64_unknown_linux_gnu_TARGET_HOST +NIX_ENFORCE_NO_NATIVE=1 +export NIX_ENFORCE_NO_NATIVE +NIX_HARDENING_ENABLE='fortify stackprotector pic strictoverflow format relro bindnow' +export NIX_HARDENING_ENABLE +NIX_INDENT_MAKE=1 +export NIX_INDENT_MAKE +NIX_LDFLAGS='-rpath /home/np/code/elmstuff/app-furby/outputs/out/lib64 -rpath /home/np/code/elmstuff/app-furby/outputs/out/lib ' +export NIX_LDFLAGS +NIX_LIB64_IN_SELF_RPATH=1 +NIX_NO_SELF_RPATH=1 +NIX_STORE=/nix/store +export NIX_STORE +NM=nm +export NM +OBJCOPY=objcopy +export OBJCOPY +OBJDUMP=objdump +export OBJDUMP +OPTERR=1 +OPTIND=1 +OSTYPE=linux-gnu 
+PATH=/nix/store/71n1xcigc00w3z7yc836jqcx9cb2dys8-patchelf-0.9/bin:/nix/store/m6h7zh8w6s52clnyskffj5lbkakqgywn-gcc-wrapper-9.2.0/bin:/nix/store/b3zsk4ihlpiimv3vff86bb5bxghgdzb9-gcc-9.2.0/bin:/nix/store/0k65d30z9xsixil10yw3bwajbdk4yskv-glibc-2.30-bin/bin:/nix/store/x0jla3hpxrwz76hy9yckg1iyc9hns81k-coreutils-8.31/bin:/nix/store/n48b8n251dwwb04q7f3fwxdmirsakllz-binutils-wrapper-2.31.1/bin:/nix/store/hrkc2sf2883l16d5yq3zg0y339kfw4xv-binutils-2.31.1/bin:/nix/store/0k65d30z9xsixil10yw3bwajbdk4yskv-glibc-2.30-bin/bin:/nix/store/x0jla3hpxrwz76hy9yckg1iyc9hns81k-coreutils-8.31/bin:/nix/store/603kisq28l3prqr92z5hffh7fmwwsc9f-elm-0.19.1/bin:/nix/store/amj35xgl7bhifnih0mh39j8kbvsvg2xn-node__at_elm-tooling_slash_elm-language-server-1.6.3/bin:/nix/store/hzxpv4ha44w9pg8ynkfsgjhi3kb13h27-elm-format-0.8.3/bin:/nix/store/w3sh9aki5wrchcrq2wlg2xb394f8brh4-node_elm-oracle-1.1.1/bin:/nix/store/3yl8y2fkc1wsyqm008is2pdw5dja2icy-node_elm-test-0.19.1-revision2/bin:/nix/store/x0jla3hpxrwz76hy9yckg1iyc9hns81k-coreutils-8.31/bin:/nix/store/97vambzyvpvrd9wgrrw7i7svi0s8vny5-findutils-4.7.0/bin:/nix/store/dqq1bvpi3g0h4v05111b3i0ymqj4v5x1-diffutils-3.7/bin:/nix/store/p34p7ysy84579lndk7rbrz6zsfr03y71-gnused-4.8/bin:/nix/store/b0vjq4r4sp9z4l2gbkc5dyyw5qfgyi3r-gnugrep-3.4/bin:/nix/store/c8balm59sxfkw9ik1fqbkadsvjqhmbx4-gawk-5.0.1/bin:/nix/store/g7dr83wnkx4gxa5ykcljc5jg04416z60-gnutar-1.32/bin:/nix/store/kkvgr3avpp7yd5hzmc4syh43jqj03sgb-gzip-1.10/bin:/nix/store/rw96psqzgyqrcd12qr6ivk9yiskjm3ab-bzip2-1.0.6.0.1-bin/bin:/nix/store/dp6y0n9cba79wwc54n1brg7xbjsq5hka-gnumake-4.2.1/bin:/nix/store/hrpvwkjz04s9i4nmli843hyw9z4pwhww-bash-4.4-p23/bin:/nix/store/xac1zfclx1xxgcd84vqb6hy3apl171n8-patch-2.7.6/bin:/nix/store/mm0w8jc58rn01c4kz2n9jvwd6bibcihs-xz-5.2.4-bin/bin +export PATH +PIPESTATUS=([0]="0") +PS4='+ ' +RANLIB=ranlib +export RANLIB +READELF=readelf +export READELF +SHELL=/nix/store/hrpvwkjz04s9i4nmli843hyw9z4pwhww-bash-4.4-p23/bin/bash +export SHELL +SIZE=size +export SIZE +SOURCE_DATE_EPOCH=1 +export SOURCE_DATE_EPOCH +STRINGS=strings +export STRINGS +STRIP=strip +export STRIP +_=export +buildInputs='/nix/store/603kisq28l3prqr92z5hffh7fmwwsc9f-elm-0.19.1 /nix/store/amj35xgl7bhifnih0mh39j8kbvsvg2xn-node__at_elm-tooling_slash_elm-language-server-1.6.3 /nix/store/hzxpv4ha44w9pg8ynkfsgjhi3kb13h27-elm-format-0.8.3 /nix/store/w3sh9aki5wrchcrq2wlg2xb394f8brh4-node_elm-oracle-1.1.1 /nix/store/3yl8y2fkc1wsyqm008is2pdw5dja2icy-node_elm-test-0.19.1-revision2' +export buildInputs +builder=/nix/store/hrpvwkjz04s9i4nmli843hyw9z4pwhww-bash-4.4-p23/bin/bash +export builder +commonStripFlags=--enable-deterministic-archives +configureFlags= +export configureFlags +defaultBuildInputs= +defaultNativeBuildInputs='/nix/store/71n1xcigc00w3z7yc836jqcx9cb2dys8-patchelf-0.9 /nix/store/mjjy30kxz775bhhi6j9phw81qh6dsbrf-move-docs.sh /nix/store/kxw6q8v6isaqjm702d71n2421cxamq68-make-symlinks-relative.sh /nix/store/rvg5a5nwa7cihpmbzlwzh931w3g4q108-compress-man-pages.sh /nix/store/4ygqr4w06zwcd2kcxa6w3441jijv0pvx-strip.sh /nix/store/g6hzqyjd3ricwbs0bbx4806fiwg15vnc-patch-shebangs.sh /nix/store/cickvswrvann041nqxb0rxilc46svw1n-prune-libtool-files.sh /nix/store/8zxndz5ag0p6s526c2xyllhk1nrn4c3i-audit-tmpdir.sh /nix/store/aknix5zw9cj7hd1m3h1d6nnmncl1vkvn-multiple-outputs.sh /nix/store/kd4xwxjpjxi71jkm6ka0np72if9rm3y0-move-sbin.sh /nix/store/fyaryjvghbkpfnsyw97hb3lyb37s1pd6-move-lib64.sh /nix/store/ngg1cv31c8c7bcm2n8ww4g06nq7s4zhm-set-source-date-epoch-to-latest.sh /nix/store/m6h7zh8w6s52clnyskffj5lbkakqgywn-gcc-wrapper-9.2.0' +depsBuildBuild= +export 
depsBuildBuild +depsBuildBuildPropagated= +export depsBuildBuildPropagated +depsBuildTarget= +export depsBuildTarget +depsBuildTargetPropagated= +export depsBuildTargetPropagated +depsHostHost= +export depsHostHost +depsHostHostPropagated= +export depsHostHostPropagated +depsTargetTarget= +export depsTargetTarget +depsTargetTargetPropagated= +export depsTargetTargetPropagated +doCheck= +export doCheck +doInstallCheck= +export doInstallCheck +dontAddDisableDepTrack=1 +export dontAddDisableDepTrack +envHostHostHooks=([0]="ccWrapper_addCVars" [1]="bintoolsWrapper_addLDVars") +envHostTargetHooks=([0]="ccWrapper_addCVars" [1]="bintoolsWrapper_addLDVars") +fixupOutputHooks=([0]="if [ -z \"\${dontPatchELF-}\" ]; then patchELF \"\$prefix\"; fi" [1]="_makeSymlinksRelative" [2]="if [ -z \"\${dontGzipMan-}\" ]; then compressManPages \"\$prefix\"; fi" [3]="_doStrip" [4]="patchShebangsAuto" [5]="_pruneLibtoolFiles" [6]="if [[ -z \"\${noAuditTmpdir-}\" && -e \"\$prefix\" ]]; then auditTmpdir \"\$prefix\"; fi" [7]="_moveSbin" [8]="_moveLib64") +initialPath='/nix/store/x0jla3hpxrwz76hy9yckg1iyc9hns81k-coreutils-8.31 /nix/store/97vambzyvpvrd9wgrrw7i7svi0s8vny5-findutils-4.7.0 /nix/store/dqq1bvpi3g0h4v05111b3i0ymqj4v5x1-diffutils-3.7 /nix/store/p34p7ysy84579lndk7rbrz6zsfr03y71-gnused-4.8 /nix/store/b0vjq4r4sp9z4l2gbkc5dyyw5qfgyi3r-gnugrep-3.4 /nix/store/c8balm59sxfkw9ik1fqbkadsvjqhmbx4-gawk-5.0.1 /nix/store/g7dr83wnkx4gxa5ykcljc5jg04416z60-gnutar-1.32 /nix/store/kkvgr3avpp7yd5hzmc4syh43jqj03sgb-gzip-1.10 /nix/store/rw96psqzgyqrcd12qr6ivk9yiskjm3ab-bzip2-1.0.6.0.1-bin /nix/store/dp6y0n9cba79wwc54n1brg7xbjsq5hka-gnumake-4.2.1 /nix/store/hrpvwkjz04s9i4nmli843hyw9z4pwhww-bash-4.4-p23 /nix/store/xac1zfclx1xxgcd84vqb6hy3apl171n8-patch-2.7.6 /nix/store/mm0w8jc58rn01c4kz2n9jvwd6bibcihs-xz-5.2.4-bin' +name=furby +export name +nativeBuildInputs= +export nativeBuildInputs +out=/home/np/code/elmstuff/app-furby/outputs/out +export out +outputBin=out +outputDev=out +outputDevdoc=REMOVE +outputDevman=out +outputDoc=out +outputInclude=out +outputInfo=out +outputLib=out +outputMan=out +outputs=out +export outputs +patches= +export patches +pkg=/nix/store/m6h7zh8w6s52clnyskffj5lbkakqgywn-gcc-wrapper-9.2.0 +pkgsBuildHost=([0]="/nix/store/71n1xcigc00w3z7yc836jqcx9cb2dys8-patchelf-0.9" [1]="/nix/store/mjjy30kxz775bhhi6j9phw81qh6dsbrf-move-docs.sh" [2]="/nix/store/kxw6q8v6isaqjm702d71n2421cxamq68-make-symlinks-relative.sh" [3]="/nix/store/rvg5a5nwa7cihpmbzlwzh931w3g4q108-compress-man-pages.sh" [4]="/nix/store/4ygqr4w06zwcd2kcxa6w3441jijv0pvx-strip.sh" [5]="/nix/store/g6hzqyjd3ricwbs0bbx4806fiwg15vnc-patch-shebangs.sh" [6]="/nix/store/cickvswrvann041nqxb0rxilc46svw1n-prune-libtool-files.sh" [7]="/nix/store/8zxndz5ag0p6s526c2xyllhk1nrn4c3i-audit-tmpdir.sh" [8]="/nix/store/aknix5zw9cj7hd1m3h1d6nnmncl1vkvn-multiple-outputs.sh" [9]="/nix/store/kd4xwxjpjxi71jkm6ka0np72if9rm3y0-move-sbin.sh" [10]="/nix/store/fyaryjvghbkpfnsyw97hb3lyb37s1pd6-move-lib64.sh" [11]="/nix/store/ngg1cv31c8c7bcm2n8ww4g06nq7s4zhm-set-source-date-epoch-to-latest.sh" [12]="/nix/store/m6h7zh8w6s52clnyskffj5lbkakqgywn-gcc-wrapper-9.2.0" [13]="/nix/store/n48b8n251dwwb04q7f3fwxdmirsakllz-binutils-wrapper-2.31.1") +pkgsHostTarget=([0]="/nix/store/603kisq28l3prqr92z5hffh7fmwwsc9f-elm-0.19.1" [1]="/nix/store/amj35xgl7bhifnih0mh39j8kbvsvg2xn-node__at_elm-tooling_slash_elm-language-server-1.6.3" [2]="/nix/store/hzxpv4ha44w9pg8ynkfsgjhi3kb13h27-elm-format-0.8.3" [3]="/nix/store/w3sh9aki5wrchcrq2wlg2xb394f8brh4-node_elm-oracle-1.1.1" 
[4]="/nix/store/3yl8y2fkc1wsyqm008is2pdw5dja2icy-node_elm-test-0.19.1-revision2") +postFixupHooks=([0]="_multioutPropagateDev") +postUnpackHooks=([0]="_updateSourceDateEpochFromSourceRoot") +preConfigureHooks=([0]="_multioutConfig") +preFixupHooks=([0]="_moveToShare" [1]="_multioutDocs" [2]="_multioutDevs") +prefix=/home/np/code/elmstuff/app-furby/outputs/out +propagatedBuildDepFiles=([0]="propagated-build-build-deps" [1]="propagated-native-build-inputs" [2]="propagated-build-target-deps") +propagatedBuildInputs= +export propagatedBuildInputs +propagatedHostDepFiles=([0]="propagated-host-host-deps" [1]="propagated-build-inputs") +propagatedNativeBuildInputs= +export propagatedNativeBuildInputs +propagatedTargetDepFiles=([0]="propagated-target-target-deps") +shell=/nix/store/hrpvwkjz04s9i4nmli843hyw9z4pwhww-bash-4.4-p23/bin/bash +export shell +src=./. +export src +stdenv=/nix/store/sm7kk5n84vaisqvhk1yfsjqls50j8s0m-stdenv-linux +export stdenv +strictDeps= +export strictDeps +system=x86_64-linux +export system +unpackCmdHooks=([0]="_defaultUnpack") +PATH="$PATH:$nix_saved_PATH" +_activatePkgs () +{ + local -i hostOffset targetOffset; + local pkg; + for hostOffset in "${allPlatOffsets[@]}"; + do + local pkgsVar="${pkgAccumVarVars[$hostOffset + 1]}"; + for targetOffset in "${allPlatOffsets[@]}"; + do + (( "$hostOffset" <= "$targetOffset" )) || continue; + local pkgsRef="${pkgsVar}[$targetOffset - $hostOffset]"; + local pkgsSlice="${!pkgsRef}[@]"; + for pkg in ${!pkgsSlice+"${!pkgsSlice}"}; + do + activatePackage "$pkg" "$hostOffset" "$targetOffset"; + done; + done; + done +} +_addRpathPrefix () +{ + if [ "${NIX_NO_SELF_RPATH:-0}" != 1 ]; then + export NIX_LDFLAGS="-rpath $1/lib ${NIX_LDFLAGS-}"; + if [ -n "${NIX_LIB64_IN_SELF_RPATH:-}" ]; then + export NIX_LDFLAGS="-rpath $1/lib64 ${NIX_LDFLAGS-}"; + fi; + if [ -n "${NIX_LIB32_IN_SELF_RPATH:-}" ]; then + export NIX_LDFLAGS="-rpath $1/lib32 ${NIX_LDFLAGS-}"; + fi; + fi +} +_addToEnv () +{ + local -i depHostOffset depTargetOffset; + local pkg; + for depHostOffset in "${allPlatOffsets[@]}"; + do + local hookVar="${pkgHookVarVars[$depHostOffset + 1]}"; + local pkgsVar="${pkgAccumVarVars[$depHostOffset + 1]}"; + for depTargetOffset in "${allPlatOffsets[@]}"; + do + (( "$depHostOffset" <= "$depTargetOffset" )) || continue; + local hookRef="${hookVar}[$depTargetOffset - $depHostOffset]"; + if [[ -z "${strictDeps-}" ]]; then + local visitedPkgs=""; + for pkg in ${pkgsBuildBuild+"${pkgsBuildBuild[@]}"} ${pkgsBuildHost+"${pkgsBuildHost[@]}"} ${pkgsBuildTarget+"${pkgsBuildTarget[@]}"} ${pkgsHostHost+"${pkgsHostHost[@]}"} ${pkgsHostTarget+"${pkgsHostTarget[@]}"} ${pkgsTargetTarget+"${pkgsTargetTarget[@]}"}; + do + if [[ "$visitedPkgs" = *"$pkg"* ]]; then + continue; + fi; + runHook "${!hookRef}" "$pkg"; + visitedPkgs+=" $pkg"; + done; + else + local pkgsRef="${pkgsVar}[$depTargetOffset - $depHostOffset]"; + local pkgsSlice="${!pkgsRef}[@]"; + for pkg in ${!pkgsSlice+"${!pkgsSlice}"}; + do + runHook "${!hookRef}" "$pkg"; + done; + fi; + done; + done +} +_allFlags () +{ + for varName in $(awk 'BEGIN { for (v in ENVIRON) if (v ~ /^[a-z][a-zA-Z0-9_]*$/) print v }'); + do + if (( "${NIX_DEBUG:-0}" >= 1 )); then + printf "@%s@ -> %q\n" "${varName}" "${!varName}"; + fi; + args+=("--subst-var" "$varName"); + done +} +_assignFirst () +{ + local varName="$1"; + local REMOVE=REMOVE; + shift; + while (( $# )); do + if [ -n "${!1-}" ]; then + eval "${varName}"="$1"; + return; + fi; + shift; + done; + echo "Error: _assignFirst found no valid variant!"; + return 1 +} 
+_callImplicitHook () +{ + local def="$1"; + local hookName="$2"; + if declare -F "$hookName" > /dev/null; then + "$hookName"; + else + if type -p "$hookName" > /dev/null; then + source "$hookName"; + else + if [ -n "${!hookName:-}" ]; then + eval "${!hookName}"; + else + return "$def"; + fi; + fi; + fi +} +_defaultUnpack () +{ + local fn="$1"; + if [ -d "$fn" ]; then + cp -pr --reflink=auto -- "$fn" "$(stripHash "$fn")"; + else + case "$fn" in + *.tar.xz | *.tar.lzma | *.txz) + xz -d < "$fn" | tar xf - + ;; + *.tar | *.tar.* | *.tgz | *.tbz2 | *.tbz) + tar xf "$fn" + ;; + *) + return 1 + ;; + esac; + fi +} +_doStrip () +{ + local -ra flags=(dontStripHost dontStripTarget); + local -ra stripCmds=(STRIP TARGET_STRIP); + if [[ "${STRIP-}" == "${TARGET_STRIP-}" ]]; then + dontStripTarget+=1; + fi; + local i; + for i in ${!stripCmds[@]}; + do + local -n flag="${flags[$i]}"; + local -n stripCmd="${stripCmds[$i]}"; + if [[ -n "${dontStrip-}" || -n "${flag-}" ]] || ! type -f "${stripCmd-}" 2> /dev/null; then + continue; + fi; + stripDebugList=${stripDebugList:-lib lib32 lib64 libexec bin sbin}; + if [ -n "$stripDebugList" ]; then + stripDirs "$stripCmd" "$stripDebugList" "${stripDebugFlags:--S}"; + fi; + stripAllList=${stripAllList:-}; + if [ -n "$stripAllList" ]; then + stripDirs "$stripCmd" "$stripAllList" "${stripAllFlags:--s}"; + fi; + done +} +_eval () +{ + if declare -F "$1" > /dev/null 2>&1; then + "$@"; + else + eval "$1"; + fi +} +_makeSymlinksRelative () +{ + local symlinkTarget; + if [ -n "${dontRewriteSymlinks-}" ]; then + return 0; + fi; + while IFS= read -r -d '' f; do + symlinkTarget=$(readlink "$f"); + if [[ "$symlinkTarget"/ != "$prefix"/* ]]; then + continue; + fi; + if [ ! -e "$symlinkTarget" ]; then + echo "the symlink $f is broken, it points to $symlinkTarget (which is missing)"; + fi; + echo "rewriting symlink $f to be relative to $prefix"; + ln -snrf "$symlinkTarget" "$f"; + done < <(find $prefix -type l -print0) +} +_moveLib64 () +{ + if [ "${dontMoveLib64-}" = 1 ]; then + return; + fi; + if [ ! -e "$prefix/lib64" -o -L "$prefix/lib64" ]; then + return; + fi; + echo "moving $prefix/lib64/* to $prefix/lib"; + mkdir -p $prefix/lib; + shopt -s dotglob; + for i in $prefix/lib64/*; + do + mv --no-clobber "$i" $prefix/lib; + done; + shopt -u dotglob; + rmdir $prefix/lib64; + ln -s lib $prefix/lib64 +} +_moveSbin () +{ + if [ "${dontMoveSbin-}" = 1 ]; then + return; + fi; + if [ ! 
-e "$prefix/sbin" -o -L "$prefix/sbin" ]; then + return; + fi; + echo "moving $prefix/sbin/* to $prefix/bin"; + mkdir -p $prefix/bin; + shopt -s dotglob; + for i in $prefix/sbin/*; + do + mv "$i" $prefix/bin; + done; + shopt -u dotglob; + rmdir $prefix/sbin; + ln -s bin $prefix/sbin +} +_moveToShare () +{ + forceShare=${forceShare:=man doc info}; + if [ -z "$forceShare" -o -z "$out" ]; then + return; + fi; + for d in $forceShare; + do + if [ -d "$out/$d" ]; then + if [ -d "$out/share/$d" ]; then + echo "both $d/ and share/$d/ exist!"; + else + echo "moving $out/$d to $out/share/$d"; + mkdir -p $out/share; + mv $out/$d $out/share/; + fi; + fi; + done +} +_multioutConfig () +{ + if [ "$outputs" = "out" ] || [ -z "${setOutputFlags-1}" ]; then + return; + fi; + if [ -z "$shareDocName" ]; then + local confScript="$configureScript"; + if [ -z "$confScript" ] && [ -x ./configure ]; then + confScript=./configure; + fi; + if [ -f "$confScript" ]; then + local shareDocName="$(sed -n "s/^PACKAGE_TARNAME='\(.*\)'$/\1/p" < "$confScript")"; + fi; + if [ -n "$shareDocName" ] || echo "$shareDocName" | grep -q '[^a-zA-Z0-9_-]'; then + shareDocName="$(echo "$name" | sed 's/-[^a-zA-Z].*//')"; + fi; + fi; + configureFlags=" --bindir=${!outputBin}/bin --sbindir=${!outputBin}/sbin --includedir=${!outputInclude}/include --oldincludedir=${!outputInclude}/include --mandir=${!outputMan}/share/man --infodir=${!outputInfo}/share/info --docdir=${!outputDoc}/share/doc/${shareDocName} --libdir=${!outputLib}/lib --libexecdir=${!outputLib}/libexec --localedir=${!outputLib}/share/locale $configureFlags"; + installFlags=" pkgconfigdir=${!outputDev}/lib/pkgconfig m4datadir=${!outputDev}/share/aclocal aclocaldir=${!outputDev}/share/aclocal $installFlags" +} +_multioutDevs () +{ + if [ "$outputs" = "out" ] || [ -z "${moveToDev-1}" ]; then + return; + fi; + moveToOutput include "${!outputInclude}"; + moveToOutput lib/pkgconfig "${!outputDev}"; + moveToOutput share/pkgconfig "${!outputDev}"; + moveToOutput lib/cmake "${!outputDev}"; + moveToOutput share/aclocal "${!outputDev}"; + for f in "${!outputDev}"/{lib,share}/pkgconfig/*.pc; + do + echo "Patching '$f' includedir to output ${!outputInclude}"; + sed -i "/^includedir=/s,=\${prefix},=${!outputInclude}," "$f"; + done +} +_multioutDocs () +{ + local REMOVE=REMOVE; + moveToOutput share/info "${!outputInfo}"; + moveToOutput share/doc "${!outputDoc}"; + moveToOutput share/gtk-doc "${!outputDevdoc}"; + moveToOutput share/devhelp/books "${!outputDevdoc}"; + moveToOutput share/man "${!outputMan}"; + moveToOutput share/man/man3 "${!outputDevman}" +} +_multioutPropagateDev () +{ + if [ "$outputs" = "out" ]; then + return; + fi; + local outputFirst; + for outputFirst in $outputs; + do + break; + done; + local propagaterOutput="$outputDev"; + if [ -z "$propagaterOutput" ]; then + propagaterOutput="$outputFirst"; + fi; + if [ -z "${propagatedBuildOutputs+1}" ]; then + local po_dirty="$outputBin $outputInclude $outputLib"; + set +o pipefail; + propagatedBuildOutputs=`echo "$po_dirty" | tr -s ' ' '\n' | grep -v -F "$propagaterOutput" | sort -u | tr '\n' ' ' `; + set -o pipefail; + fi; + if [ -z "$propagatedBuildOutputs" ]; then + return; + fi; + mkdir -p "${!propagaterOutput}"/nix-support; + for output in $propagatedBuildOutputs; + do + echo -n " ${!output}" >> "${!propagaterOutput}"/nix-support/propagated-build-inputs; + done +} +_overrideFirst () +{ + if [ -z "${!1-}" ]; then + _assignFirst "$@"; + fi +} +_pruneLibtoolFiles () +{ + if [ "${dontPruneLibtoolFiles-}" ] || [ ! 
-e "$prefix" ]; then + return; + fi; + find "$prefix" -type f -name '*.la' -exec grep -q '^# Generated by .*libtool' {} \; -exec grep -q "^old_library=''" {} \; -exec sed -i {} -e "/^dependency_libs='[^']/ c dependency_libs='' #pruned" \; +} +_updateSourceDateEpochFromSourceRoot () +{ + if [ -n "$sourceRoot" ]; then + updateSourceDateEpoch "$sourceRoot"; + fi +} +activatePackage () +{ + local pkg="$1"; + local -ri hostOffset="$2"; + local -ri targetOffset="$3"; + (( "$hostOffset" <= "$targetOffset" )) || exit -1; + if [ -f "$pkg" ]; then + source "$pkg"; + fi; + if [[ ( -z "${strictDeps-}" || "$hostOffset" -le -1 ) && -d "$pkg/bin" ]]; then + addToSearchPath _PATH "$pkg/bin"; + fi; + if [[ "$hostOffset" -eq 0 && -d "$pkg/bin" ]]; then + addToSearchPath _HOST_PATH "$pkg/bin"; + fi; + if [[ -f "$pkg/nix-support/setup-hook" ]]; then + source "$pkg/nix-support/setup-hook"; + fi +} +addEnvHooks () +{ + local depHostOffset="$1"; + shift; + local pkgHookVarsSlice="${pkgHookVarVars[$depHostOffset + 1]}[@]"; + local pkgHookVar; + for pkgHookVar in "${!pkgHookVarsSlice}"; + do + eval "${pkgHookVar}s"'+=("$@")'; + done +} +addToSearchPath () +{ + addToSearchPathWithCustomDelimiter ":" "$@" +} +addToSearchPathWithCustomDelimiter () +{ + local delimiter="$1"; + local varName="$2"; + local dir="$3"; + if [ -d "$dir" ]; then + export "${varName}=${!varName:+${!varName}${delimiter}}${dir}"; + fi +} +auditTmpdir () +{ + local dir="$1"; + [ -e "$dir" ] || return 0; + header "checking for references to $TMPDIR/ in $dir..."; + local i; + while IFS= read -r -d '' i; do + if [[ "$i" =~ .build-id ]]; then + continue; + fi; + if isELF "$i"; then + if { + printf :; + patchelf --print-rpath "$i" + } | grep -q -F ":$TMPDIR/"; then + echo "RPATH of binary $i contains a forbidden reference to $TMPDIR/"; + exit 1; + fi; + fi; + if isScript "$i"; then + if [ -e "$(dirname "$i")/.$(basename "$i")-wrapped" ]; then + if grep -q -F "$TMPDIR/" "$i"; then + echo "wrapper script $i contains a forbidden reference to $TMPDIR/"; + exit 1; + fi; + fi; + fi; + done < <(find "$dir" -type f -print0); + stopNest +} +bintoolsWrapper_addLDVars () +{ + local role_post role_pre; + getHostRoleEnvHook; + if [[ -d "$1/lib64" && ! -L "$1/lib64" ]]; then + export NIX_${role_pre}LDFLAGS+=" -L$1/lib64"; + fi; + if [[ -d "$1/lib" ]]; then + local -a glob=($1/lib/lib*); + if [ "${#glob[*]}" -gt 0 ]; then + export NIX_${role_pre}LDFLAGS+=" -L$1/lib"; + fi; + fi +} +buildPhase () +{ + runHook preBuild; + : ${makeFlags=}; + if [[ -z "$makeFlags" && -z "${makefile:-}" && ! 
( -e Makefile || -e makefile || -e GNUmakefile ) ]]; then + echo "no Makefile, doing nothing"; + else + foundMakefile=1; + local flagsArray=(${enableParallelBuilding:+-j${NIX_BUILD_CORES} -l${NIX_BUILD_CORES}} SHELL=$SHELL $makeFlags ${makeFlagsArray+"${makeFlagsArray[@]}"} $buildFlags ${buildFlagsArray+"${buildFlagsArray[@]}"}); + echoCmd 'build flags' "${flagsArray[@]}"; + make ${makefile:+-f $makefile} "${flagsArray[@]}"; + unset flagsArray; + fi; + runHook postBuild +} +ccWrapper_addCVars () +{ + local role_post role_pre; + getHostRoleEnvHook; + if [ -d "$1/include" ]; then + export NIX_${role_pre}CFLAGS_COMPILE+=" -isystem $1/include"; + fi; + if [ -d "$1/Library/Frameworks" ]; then + export NIX_${role_pre}CFLAGS_COMPILE+=" -iframework $1/Library/Frameworks"; + fi +} +checkPhase () +{ + runHook preCheck; + if [[ -z "${foundMakefile:-}" ]]; then + echo "no Makefile or custom buildPhase, doing nothing"; + runHook postCheck; + return; + fi; + if [[ -z "${checkTarget:-}" ]]; then + if make -n ${makefile:+-f $makefile} check > /dev/null 2>&1; then + checkTarget=check; + else + if make -n ${makefile:+-f $makefile} test > /dev/null 2>&1; then + checkTarget=test; + fi; + fi; + fi; + if [[ -z "${checkTarget:-}" ]]; then + echo "no check/test target in ${makefile:-Makefile}, doing nothing"; + else + local flagsArray=(${enableParallelChecking:+-j${NIX_BUILD_CORES} -l${NIX_BUILD_CORES}} SHELL=$SHELL $makeFlags ${makeFlagsArray+"${makeFlagsArray[@]}"} ${checkFlags:-VERBOSE=y} ${checkFlagsArray+"${checkFlagsArray[@]}"} ${checkTarget}); + echoCmd 'check flags' "${flagsArray[@]}"; + make ${makefile:+-f $makefile} "${flagsArray[@]}"; + unset flagsArray; + fi; + runHook postCheck +} +closeNest () +{ + true +} +compressManPages () +{ + local dir="$1"; + if [ -L "$dir"/share ] || [ -L "$dir"/share/man ] || [ ! -d "$dir/share/man" ]; then + return; + fi; + echo "gzipping man pages under $dir/share/man/"; + find "$dir"/share/man/ -type f -a '!' -regex '.*\.\(bz2\|gz\)$' -print0 | while IFS= read -r -d '' f; do + if gzip -c -n "$f" > "$f".gz; then + rm "$f"; + else + rm "$f".gz; + fi; + done; + find "$dir"/share/man/ -type l -a '!' -regex '.*\.\(bz2\|gz\)$' -print0 | while IFS= read -r -d '' f; do + local target; + target="$(readlink -f "$f")"; + if [ -f "$target".gz ]; then + ln -sf "$target".gz "$f".gz && rm "$f"; + fi; + done +} +configurePhase () +{ + runHook preConfigure; + : ${configureScript=}; + : ${configureFlags=}; + if [[ -z "$configureScript" && -x ./configure ]]; then + configureScript=./configure; + fi; + if [ -z "${dontFixLibtool:-}" ]; then + local i; + find . 
-iname "ltmain.sh" -print0 | while IFS='' read -r -d '' i; do + echo "fixing libtool script $i"; + fixLibtool "$i"; + done; + fi; + if [[ -z "${dontAddPrefix:-}" && -n "$prefix" ]]; then + configureFlags="${prefixKey:---prefix=}$prefix $configureFlags"; + fi; + if [ -z "${dontAddDisableDepTrack:-}" ]; then + if [ -f "$configureScript" ] && grep -q dependency-tracking "$configureScript"; then + configureFlags="--disable-dependency-tracking $configureFlags"; + fi; + fi; + if [ -z "${dontDisableStatic:-}" ]; then + if [ -f "$configureScript" ] && grep -q enable-static "$configureScript"; then + configureFlags="--disable-static $configureFlags"; + fi; + fi; + if [ -n "$configureScript" ]; then + local flagsArray=($configureFlags ${configureFlagsArray+"${configureFlagsArray[@]}"}); + echoCmd 'configure flags' "${flagsArray[@]}"; + $configureScript "${flagsArray[@]}"; + unset flagsArray; + else + echo "no configure script, doing nothing"; + fi; + runHook postConfigure +} +consumeEntire () +{ + if IFS='' read -r -N 0 $1; then + echo "consumeEntire(): ERROR: Input null bytes, won't process" 1>&2; + return 1; + fi +} +distPhase () +{ + runHook preDist; + local flagsArray=($distFlags ${distFlagsArray+"${distFlagsArray[@]}"} ${distTarget:-dist}); + echo 'dist flags: %q' "${flagsArray[@]}"; + make ${makefile:+-f $makefile} "${flagsArray[@]}"; + if [ "${dontCopyDist:-0}" != 1 ]; then + mkdir -p "$out/tarballs"; + cp -pvd ${tarballs:-*.tar.gz} "$out/tarballs"; + fi; + runHook postDist +} +dumpVars () +{ + if [ "${noDumpEnvVars:-0}" != 1 ]; then + export > "$NIX_BUILD_TOP/env-vars" || true; + fi +} +echoCmd () +{ + printf "%s:" "$1"; + shift; + printf ' %q' "$@"; + echo +} +exitHandler () +{ + exitCode="$?"; + set +e; + if [ -n "${showBuildStats:-}" ]; then + times > "$NIX_BUILD_TOP/.times"; + local -a times=($(cat "$NIX_BUILD_TOP/.times")); + echo "build time elapsed: " "${times[@]}"; + fi; + if (( "$exitCode" != 0 )); then + runHook failureHook; + if [ -n "${succeedOnFailure:-}" ]; then + echo "build failed with exit code $exitCode (ignored)"; + mkdir -p "$out/nix-support"; + printf "%s" "$exitCode" > "$out/nix-support/failed"; + exit 0; + fi; + else + runHook exitHook; + fi; + exit "$exitCode" +} +findInputs () +{ + local -r pkg="$1"; + local -ri hostOffset="$2"; + local -ri targetOffset="$3"; + (( "$hostOffset" <= "$targetOffset" )) || exit -1; + local varVar="${pkgAccumVarVars[$hostOffset + 1]}"; + local varRef="$varVar[\$targetOffset - \$hostOffset]"; + local var="${!varRef}"; + unset -v varVar varRef; + local varSlice="$var[*]"; + case "${!varSlice-}" in + *" $pkg "*) + return 0 + ;; + esac; + unset -v varSlice; + eval "$var"'+=("$pkg")'; + if ! 
[ -e "$pkg" ]; then + echo "build input $pkg does not exist" 1>&2; + exit 1; + fi; + local -i mapOffsetResult; + function mapOffset () + { + local -ri inputOffset="$1"; + if (( "$inputOffset" <= 0 )); then + local -ri outputOffset="$inputOffset + $hostOffset"; + else + local -ri outputOffset="$inputOffset - 1 + $targetOffset"; + fi; + mapOffsetResult="$outputOffset" + }; + local -i relHostOffset; + for relHostOffset in "${allPlatOffsets[@]}"; + do + local files="${propagatedDepFilesVars[$relHostOffset + 1]}"; + mapOffset relHostOffset; + local -i hostOffsetNext="$mapOffsetResult"; + [[ "${allPlatOffsets[*]}" = *"$hostOffsetNext"* ]] || continue; + local -i relTargetOffset; + for relTargetOffset in "${allPlatOffsets[@]}"; + do + (( "$relHostOffset" <= "$relTargetOffset" )) || continue; + local fileRef="${files}[$relTargetOffset - $relHostOffset]"; + local file="${!fileRef}"; + unset -v fileRef; + mapOffset relTargetOffset; + local -i targetOffsetNext="$mapOffsetResult"; + [[ "${allPlatOffsets[*]}" = *"$targetOffsetNext"* ]] || continue; + [[ -f "$pkg/nix-support/$file" ]] || continue; + local pkgNext; + read -r -d '' pkgNext < "$pkg/nix-support/$file" || true; + for pkgNext in $pkgNext; + do + findInputs "$pkgNext" "$hostOffsetNext" "$targetOffsetNext"; + done; + done; + done +} +fixLibtool () +{ + sed -i -e 's^eval sys_lib_.*search_path=.*^^' "$1" +} +fixupPhase () +{ + local output; + for output in $outputs; + do + if [ -e "${!output}" ]; then + chmod -R u+w "${!output}"; + fi; + done; + runHook preFixup; + local output; + for output in $outputs; + do + prefix="${!output}" runHook fixupOutput; + done; + declare -ra flatVars=(depsBuildBuildPropagated propagatedNativeBuildInputs depsBuildTargetPropagated depsHostHostPropagated propagatedBuildInputs depsTargetTargetPropagated); + declare -ra flatFiles=("${propagatedBuildDepFiles[@]}" "${propagatedHostDepFiles[@]}" "${propagatedTargetDepFiles[@]}"); + local propagatedInputsIndex; + for propagatedInputsIndex in "${!flatVars[@]}"; + do + local propagatedInputsSlice="${flatVars[$propagatedInputsIndex]}[@]"; + local propagatedInputsFile="${flatFiles[$propagatedInputsIndex]}"; + [[ -n "${!propagatedInputsSlice}" ]] || continue; + mkdir -p "${!outputDev}/nix-support"; + printWords ${!propagatedInputsSlice} > "${!outputDev}/nix-support/$propagatedInputsFile"; + done; + if [ -n "${setupHook:-}" ]; then + mkdir -p "${!outputDev}/nix-support"; + substituteAll "$setupHook" "${!outputDev}/nix-support/setup-hook"; + fi; + if [ -n "${setupHooks:-}" ]; then + mkdir -p "${!outputDev}/nix-support"; + local hook; + for hook in $setupHooks; + do + local content; + consumeEntire content < "$hook"; + substituteAllStream content "file '$hook'" >> "${!outputDev}/nix-support/setup-hook"; + unset -v content; + done; + unset -v hook; + fi; + if [ -n "${propagatedUserEnvPkgs:-}" ]; then + mkdir -p "${!outputBin}/nix-support"; + printWords $propagatedUserEnvPkgs > "${!outputBin}/nix-support/propagated-user-env-packages"; + fi; + runHook postFixup +} +genericBuild () +{ + if [ -f "${buildCommandPath:-}" ]; then + source "$buildCommandPath"; + return; + fi; + if [ -n "${buildCommand:-}" ]; then + eval "$buildCommand"; + return; + fi; + if [ -z "${phases:-}" ]; then + phases="${prePhases:-} unpackPhase patchPhase ${preConfigurePhases:-} configurePhase ${preBuildPhases:-} buildPhase checkPhase ${preInstallPhases:-} installPhase ${preFixupPhases:-} fixupPhase installCheckPhase ${preDistPhases:-} distPhase ${postPhases:-}"; + fi; + for curPhase in $phases; + do + if [[ 
"$curPhase" = unpackPhase && -n "${dontUnpack:-}" ]]; then + continue; + fi; + if [[ "$curPhase" = configurePhase && -n "${dontConfigure:-}" ]]; then + continue; + fi; + if [[ "$curPhase" = buildPhase && -n "${dontBuild:-}" ]]; then + continue; + fi; + if [[ "$curPhase" = checkPhase && -z "${doCheck:-}" ]]; then + continue; + fi; + if [[ "$curPhase" = installPhase && -n "${dontInstall:-}" ]]; then + continue; + fi; + if [[ "$curPhase" = fixupPhase && -n "${dontFixup:-}" ]]; then + continue; + fi; + if [[ "$curPhase" = installCheckPhase && -z "${doInstallCheck:-}" ]]; then + continue; + fi; + if [[ "$curPhase" = distPhase && -z "${doDist:-}" ]]; then + continue; + fi; + if [[ -n $NIX_LOG_FD ]]; then + echo "@nix { \"action\": \"setPhase\", \"phase\": \"$curPhase\" }" 1>&$NIX_LOG_FD; + fi; + showPhaseHeader "$curPhase"; + dumpVars; + eval "${!curPhase:-$curPhase}"; + if [ "$curPhase" = unpackPhase ]; then + cd "${sourceRoot:-.}"; + fi; + done +} +getHostRole () +{ + getRole "$hostOffset" +} +getHostRoleEnvHook () +{ + getRole "$depHostOffset" +} +getRole () +{ + case $1 in + -1) + role_pre='BUILD_'; + role_post='_FOR_BUILD' + ;; + 0) + role_pre=''; + role_post='' + ;; + 1) + role_pre='TARGET_'; + role_post='_FOR_TARGET' + ;; + *) + echo "binutils-wrapper-2.31.1: used as improper sort of dependency" > 2; + return 1 + ;; + esac +} +getTargetRole () +{ + getRole "$targetOffset" +} +getTargetRoleEnvHook () +{ + getRole "$depTargetOffset" +} +getTargetRoleWrapper () +{ + case $targetOffset in + -1) + export NIX_BINTOOLS_WRAPPER_x86_64_unknown_linux_gnu_TARGET_BUILD=1 + ;; + 0) + export NIX_BINTOOLS_WRAPPER_x86_64_unknown_linux_gnu_TARGET_HOST=1 + ;; + 1) + export NIX_BINTOOLS_WRAPPER_x86_64_unknown_linux_gnu_TARGET_TARGET=1 + ;; + *) + echo "binutils-wrapper-2.31.1: used as improper sort of dependency" > 2; + return 1 + ;; + esac +} +header () +{ + echo "$1" +} +installCheckPhase () +{ + runHook preInstallCheck; + if [[ -z "${foundMakefile:-}" ]]; then + echo "no Makefile or custom buildPhase, doing nothing"; + else + if [[ -z "${installCheckTarget:-}" ]] && ! make -n ${makefile:+-f $makefile} ${installCheckTarget:-installcheck} > /dev/null 2>&1; then + echo "no installcheck target in ${makefile:-Makefile}, doing nothing"; + else + local flagsArray=(${enableParallelChecking:+-j${NIX_BUILD_CORES} -l${NIX_BUILD_CORES}} SHELL=$SHELL $makeFlags ${makeFlagsArray+"${makeFlagsArray[@]}"} $installCheckFlags ${installCheckFlagsArray+"${installCheckFlagsArray[@]}"} ${installCheckTarget:-installcheck}); + echoCmd 'installcheck flags' "${flagsArray[@]}"; + make ${makefile:+-f $makefile} "${flagsArray[@]}"; + unset flagsArray; + fi; + fi; + runHook postInstallCheck +} +installPhase () +{ + runHook preInstall; + if [ -n "$prefix" ]; then + mkdir -p "$prefix"; + fi; + local flagsArray=(SHELL=$SHELL $makeFlags ${makeFlagsArray+"${makeFlagsArray[@]}"} $installFlags ${installFlagsArray+"${installFlagsArray[@]}"} ${installTargets:-install}); + echoCmd 'install flags' "${flagsArray[@]}"; + make ${makefile:+-f $makefile} "${flagsArray[@]}"; + unset flagsArray; + runHook postInstall +} +isELF () +{ + local fn="$1"; + local fd; + local magic; + exec {fd}< "$fn"; + read -r -n 4 -u "$fd" magic; + exec {fd}>&-; + if [ "$magic" = 'ELF' ]; then + return 0; + else + return 1; + fi +} +isScript () +{ + local fn="$1"; + local fd; + local magic; + exec {fd}< "$fn"; + read -r -n 2 -u "$fd" magic; + exec {fd}>&-; + if [[ "$magic" =~ \#! 
]]; then + return 0; + else + return 1; + fi +} +mapOffset () +{ + local -ri inputOffset="$1"; + if (( "$inputOffset" <= 0 )); then + local -ri outputOffset="$inputOffset + $hostOffset"; + else + local -ri outputOffset="$inputOffset - 1 + $targetOffset"; + fi; + mapOffsetResult="$outputOffset" +} +moveToOutput () +{ + local patt="$1"; + local dstOut="$2"; + local output; + for output in $outputs; + do + if [ "${!output}" = "$dstOut" ]; then + continue; + fi; + local srcPath; + for srcPath in "${!output}"/$patt; + do + if [ ! -e "$srcPath" ] && [ ! -L "$srcPath" ]; then + continue; + fi; + if [ "$dstOut" = REMOVE ]; then + echo "Removing $srcPath"; + rm -r "$srcPath"; + else + local dstPath="$dstOut${srcPath#${!output}}"; + echo "Moving $srcPath to $dstPath"; + if [ -d "$dstPath" ] && [ -d "$srcPath" ]; then + rmdir "$srcPath" --ignore-fail-on-non-empty; + if [ -d "$srcPath" ]; then + mv -t "$dstPath" "$srcPath"/*; + rmdir "$srcPath"; + fi; + else + mkdir -p "$(readlink -m "$dstPath/..")"; + mv "$srcPath" "$dstPath"; + fi; + fi; + local srcParent="$(readlink -m "$srcPath/..")"; + if rmdir "$srcParent"; then + echo "Removing empty $srcParent/ and (possibly) its parents"; + rmdir -p --ignore-fail-on-non-empty "$(readlink -m "$srcParent/..")" 2> /dev/null || true; + fi; + done; + done +} +patchELF () +{ + local dir="$1"; + [ -e "$dir" ] || return 0; + header "shrinking RPATHs of ELF executables and libraries in $dir"; + local i; + while IFS= read -r -d '' i; do + if [[ "$i" =~ .build-id ]]; then + continue; + fi; + if ! isELF "$i"; then + continue; + fi; + echo "shrinking $i"; + patchelf --shrink-rpath "$i" || true; + done < <(find "$dir" -type f -print0); + stopNest +} +patchPhase () +{ + runHook prePatch; + for i in ${patches:-}; + do + header "applying patch $i" 3; + local uncompress=cat; + case "$i" in + *.gz) + uncompress="gzip -d" + ;; + *.bz2) + uncompress="bzip2 -d" + ;; + *.xz) + uncompress="xz -d" + ;; + *.lzma) + uncompress="lzma -d" + ;; + esac; + $uncompress < "$i" 2>&1 | patch ${patchFlags:--p1}; + done; + runHook postPatch +} +patchShebangs () +{ + local pathName; + if [ "$1" = "--host" ]; then + pathName=HOST_PATH; + shift; + else + if [ "$1" = "--build" ]; then + pathName=PATH; + shift; + fi; + fi; + echo "patching script interpreter paths in $@"; + local f; + local oldPath; + local newPath; + local arg0; + local args; + local oldInterpreterLine; + local newInterpreterLine; + if [ $# -eq 0 ]; then + echo "No arguments supplied to patchShebangs" > 0; + return 0; + fi; + local f; + while IFS= read -r -d '' f; do + isScript "$f" || continue; + oldInterpreterLine=$(head -1 "$f" | tail -c+3); + read -r oldPath arg0 args <<< "$oldInterpreterLine"; + if [ -z "$pathName" ]; then + if [ -n "$strictDeps" ] && [[ "$f" = "$NIX_STORE"* ]]; then + pathName=HOST_PATH; + else + pathName=PATH; + fi; + fi; + if $(echo "$oldPath" | grep -q "/bin/env$"); then + if $(echo "$arg0" | grep -q -- "^-.*\|.*=.*"); then + echo "$f: unsupported interpreter directive \"$oldInterpreterLine\" (set dontPatchShebangs=1 and handle shebang patching yourself)" > 0; + exit 1; + fi; + newPath="$(PATH="${!pathName}" command -v "$arg0" || true)"; + else + if [ "$oldPath" = "" ]; then + oldPath="/bin/sh"; + fi; + newPath="$(PATH="${!pathName}" command -v "$(basename "$oldPath")" || true)"; + args="$arg0 $args"; + fi; + newInterpreterLine="$(echo "$newPath $args" | sed 's/[[:space:]]*$//')"; + if [ -n "$oldPath" -a "${oldPath:0:${#NIX_STORE}}" != "$NIX_STORE" ]; then + if [ -n "$newPath" -a "$newPath" != "$oldPath" ]; 
then + echo "$f: interpreter directive changed from \"$oldInterpreterLine\" to \"$newInterpreterLine\""; + escapedInterpreterLine=$(echo "$newInterpreterLine" | sed 's|\\|\\\\|g'); + timestamp=$(mktemp); + touch -r "$f" "$timestamp"; + sed -i -e "1 s|.*|#\!$escapedInterpreterLine|" "$f"; + touch -r "$timestamp" "$f"; + rm "$timestamp"; + fi; + fi; + done < <(find "$@" -type f -perm -0100 -print0); + stopNest +} +patchShebangsAuto () +{ + if [ -z "${dontPatchShebangs-}" -a -e "$prefix" ]; then + if [ "$output" != out ] && [ "$output" = "$outputDev" ]; then + patchShebangs --build "$prefix"; + else + patchShebangs --host "$prefix"; + fi; + fi +} +printLines () +{ + (( "$#" > 0 )) || return 0; + printf '%s\n' "$@" +} +printWords () +{ + (( "$#" > 0 )) || return 0; + printf '%s ' "$@" +} +runHook () +{ + local hookName="$1"; + shift; + local hooksSlice="${hookName%Hook}Hooks[@]"; + local hook; + for hook in "_callImplicitHook 0 $hookName" ${!hooksSlice+"${!hooksSlice}"}; + do + _eval "$hook" "$@"; + done; + return 0 +} +runOneHook () +{ + local hookName="$1"; + shift; + local hooksSlice="${hookName%Hook}Hooks[@]"; + local hook ret=1; + for hook in "_callImplicitHook 1 $hookName" ${!hooksSlice+"${!hooksSlice}"}; + do + if _eval "$hook" "$@"; then + ret=0; + break; + fi; + done; + return "$ret" +} +showPhaseHeader () +{ + local phase="$1"; + case "$phase" in + unpackPhase) + header "unpacking sources" + ;; + patchPhase) + header "patching sources" + ;; + configurePhase) + header "configuring" + ;; + buildPhase) + header "building" + ;; + checkPhase) + header "running tests" + ;; + installPhase) + header "installing" + ;; + fixupPhase) + header "post-installation fixup" + ;; + installCheckPhase) + header "running install tests" + ;; + *) + header "$phase" + ;; + esac +} +stopNest () +{ + true +} +stripDirs () +{ + local cmd="$1"; + local dirs="$2"; + local stripFlags="$3"; + local dirsNew=; + local d; + for d in ${dirs}; + do + if [ -d "$prefix/$d" ]; then + dirsNew="${dirsNew} $prefix/$d "; + fi; + done; + dirs=${dirsNew}; + if [ -n "${dirs}" ]; then + header "stripping (with command $cmd and flags $stripFlags) in$dirs"; + find $dirs -type f -print0 | xargs -0 ${xargsFlags:--r} $cmd $commonStripFlags $stripFlags 2> /dev/null || true; + stopNest; + fi +} +stripHash () +{ + local strippedName casematchOpt=0; + strippedName="$(basename -- "$1")"; + shopt -q nocasematch && casematchOpt=1; + shopt -u nocasematch; + if [[ "$strippedName" =~ ^[a-z0-9]{32}- ]]; then + echo "${strippedName:33}"; + else + echo "$strippedName"; + fi; + if (( casematchOpt )); then + shopt -s nocasematch; + fi +} +substitute () +{ + local input="$1"; + local output="$2"; + shift 2; + if [ ! 
-f "$input" ]; then + echo "substitute(): ERROR: file '$input' does not exist" 1>&2; + return 1; + fi; + local content; + consumeEntire content < "$input"; + if [ -e "$output" ]; then + chmod +w "$output"; + fi; + substituteStream content "file '$input'" "$@" > "$output" +} +substituteAll () +{ + local input="$1"; + local output="$2"; + local -a args=(); + _allFlags; + substitute "$input" "$output" "${args[@]}" +} +substituteAllInPlace () +{ + local fileName="$1"; + shift; + substituteAll "$fileName" "$fileName" "$@" +} +substituteAllStream () +{ + local -a args=(); + _allFlags; + substituteStream "$1" "$2" "${args[@]}" +} +substituteInPlace () +{ + local fileName="$1"; + shift; + substitute "$fileName" "$fileName" "$@" +} +substituteStream () +{ + local var=$1; + local description=$2; + shift 2; + while (( "$#" )); do + case "$1" in + --replace) + pattern="$2"; + replacement="$3"; + shift 3; + local savedvar; + savedvar="${!var}"; + eval "$var"'=${'"$var"'//"$pattern"/"$replacement"}'; + if [ "$pattern" != "$replacement" ]; then + if [ "${!var}" == "$savedvar" ]; then + echo "substituteStream(): WARNING: pattern '$pattern' doesn't match anything in $description" 1>&2; + fi; + fi + ;; + --subst-var) + local varName="$2"; + shift 2; + if ! [[ "$varName" =~ ^[a-zA-Z_][a-zA-Z0-9_]*$ ]]; then + echo "substituteStream(): ERROR: substitution variables must be valid Bash names, \"$varName\" isn't." 1>&2; + return 1; + fi; + if [ -z ${!varName+x} ]; then + echo "substituteStream(): ERROR: variable \$$varName is unset" 1>&2; + return 1; + fi; + pattern="@$varName@"; + replacement="${!varName}"; + eval "$var"'=${'"$var"'//"$pattern"/"$replacement"}' + ;; + --subst-var-by) + pattern="@$2@"; + replacement="$3"; + eval "$var"'=${'"$var"'//"$pattern"/"$replacement"}'; + shift 3 + ;; + *) + echo "substituteStream(): ERROR: Invalid command line argument: $1" 1>&2; + return 1 + ;; + esac; + done; + printf "%s" "${!var}" +} +unpackFile () +{ + curSrc="$1"; + header "unpacking source archive $curSrc" 3; + if ! runOneHook unpackCmd "$curSrc"; then + echo "do not know how to unpack source archive $curSrc"; + exit 1; + fi +} +unpackPhase () +{ + runHook preUnpack; + if [ -z "${srcs:-}" ]; then + if [ -z "${src:-}" ]; then + echo 'variable $src or $srcs should point to the source'; + exit 1; + fi; + srcs="$src"; + fi; + local dirsBefore=""; + for i in *; + do + if [ -d "$i" ]; then + dirsBefore="$dirsBefore $i "; + fi; + done; + for i in $srcs; + do + unpackFile "$i"; + done; + : ${sourceRoot=}; + if [ -n "${setSourceRoot:-}" ]; then + runOneHook setSourceRoot; + else + if [ -z "$sourceRoot" ]; then + for i in *; + do + if [ -d "$i" ]; then + case $dirsBefore in + *\ $i\ *) + + ;; + *) + if [ -n "$sourceRoot" ]; then + echo "unpacker produced multiple directories"; + exit 1; + fi; + sourceRoot="$i" + ;; + esac; + fi; + done; + fi; + fi; + if [ -z "$sourceRoot" ]; then + echo "unpacker appears to have produced no directories"; + exit 1; + fi; + echo "source root is $sourceRoot"; + if [ "${dontMakeSourcesWritable:-0}" != 1 ]; then + chmod -R u+w -- "$sourceRoot"; + fi; + runHook postUnpack +} +updateSourceDateEpoch () +{ + local path="$1"; + local -a res=($(find "$path" -type f -not -newer "$NIX_BUILD_TOP/.." 
-printf '%T@ %p\0' | sort -n --zero-terminated | tail -n1 --zero-terminated | head -c -1)); + local time="${res[0]//\.[0-9]*/}"; + local newestFile="${res[1]}"; + if [ "${time:-0}" -gt "$SOURCE_DATE_EPOCH" ]; then + echo "setting SOURCE_DATE_EPOCH to timestamp $time of file $newestFile"; + export SOURCE_DATE_EPOCH="$time"; + local now="$(date +%s)"; + if [ "$time" -gt $((now - 60)) ]; then + echo "warning: file $newestFile may be generated; SOURCE_DATE_EPOCH may be non-deterministic"; + fi; + fi +} + +export NIX_BUILD_TOP="$(mktemp -d --tmpdir nix-shell.XXXXXX)" +export TMP="$NIX_BUILD_TOP" +export TMPDIR="$NIX_BUILD_TOP" +export TEMP="$NIX_BUILD_TOP" +export TEMPDIR="$NIX_BUILD_TOP" +eval "$shellHook" diff --git a/frontend/elm-stuff/0.19.1/Cart.elmi b/frontend/elm-stuff/0.19.1/Cart.elmi new file mode 100644 index 0000000..60bb740 Binary files /dev/null and b/frontend/elm-stuff/0.19.1/Cart.elmi differ diff --git a/frontend/elm-stuff/0.19.1/Cart.elmo b/frontend/elm-stuff/0.19.1/Cart.elmo new file mode 100644 index 0000000..0500a15 Binary files /dev/null and b/frontend/elm-stuff/0.19.1/Cart.elmo differ diff --git a/frontend/elm-stuff/0.19.1/Catalog.elmi b/frontend/elm-stuff/0.19.1/Catalog.elmi new file mode 100644 index 0000000..0610f11 Binary files /dev/null and b/frontend/elm-stuff/0.19.1/Catalog.elmi differ diff --git a/frontend/elm-stuff/0.19.1/Catalog.elmo b/frontend/elm-stuff/0.19.1/Catalog.elmo new file mode 100644 index 0000000..c5146fc Binary files /dev/null and b/frontend/elm-stuff/0.19.1/Catalog.elmo differ diff --git a/frontend/elm-stuff/0.19.1/Login.elmi b/frontend/elm-stuff/0.19.1/Login.elmi new file mode 100644 index 0000000..75c4629 Binary files /dev/null and b/frontend/elm-stuff/0.19.1/Login.elmi differ diff --git a/frontend/elm-stuff/0.19.1/Login.elmo b/frontend/elm-stuff/0.19.1/Login.elmo new file mode 100644 index 0000000..9a173e3 Binary files /dev/null and b/frontend/elm-stuff/0.19.1/Login.elmo differ diff --git a/frontend/elm-stuff/0.19.1/Main.elmi b/frontend/elm-stuff/0.19.1/Main.elmi new file mode 100644 index 0000000..295ad17 Binary files /dev/null and b/frontend/elm-stuff/0.19.1/Main.elmi differ diff --git a/frontend/elm-stuff/0.19.1/Main.elmo b/frontend/elm-stuff/0.19.1/Main.elmo new file mode 100644 index 0000000..0dc3269 Binary files /dev/null and b/frontend/elm-stuff/0.19.1/Main.elmo differ diff --git a/frontend/elm-stuff/0.19.1/Product.elmi b/frontend/elm-stuff/0.19.1/Product.elmi new file mode 100644 index 0000000..396a174 Binary files /dev/null and b/frontend/elm-stuff/0.19.1/Product.elmi differ diff --git a/frontend/elm-stuff/0.19.1/Product.elmo b/frontend/elm-stuff/0.19.1/Product.elmo new file mode 100644 index 0000000..c387bed Binary files /dev/null and b/frontend/elm-stuff/0.19.1/Product.elmo differ diff --git a/frontend/elm-stuff/0.19.1/Signup.elmi b/frontend/elm-stuff/0.19.1/Signup.elmi new file mode 100644 index 0000000..190d9de Binary files /dev/null and b/frontend/elm-stuff/0.19.1/Signup.elmi differ diff --git a/frontend/elm-stuff/0.19.1/Signup.elmo b/frontend/elm-stuff/0.19.1/Signup.elmo new file mode 100644 index 0000000..bcdb43d Binary files /dev/null and b/frontend/elm-stuff/0.19.1/Signup.elmo differ diff --git a/frontend/elm-stuff/0.19.1/d.dat b/frontend/elm-stuff/0.19.1/d.dat new file mode 100644 index 0000000..244afff Binary files /dev/null and b/frontend/elm-stuff/0.19.1/d.dat differ diff --git a/frontend/elm-stuff/0.19.1/i.dat b/frontend/elm-stuff/0.19.1/i.dat new file mode 100644 index 0000000..d1d1ac1 Binary files /dev/null and 
b/frontend/elm-stuff/0.19.1/i.dat differ diff --git a/frontend/elm-stuff/0.19.1/lock b/frontend/elm-stuff/0.19.1/lock new file mode 100644 index 0000000..e69de29 diff --git a/frontend/elm-stuff/0.19.1/o.dat b/frontend/elm-stuff/0.19.1/o.dat new file mode 100644 index 0000000..f25f55b Binary files /dev/null and b/frontend/elm-stuff/0.19.1/o.dat differ diff --git a/frontend/elm.json b/frontend/elm.json new file mode 100644 index 0000000..64ee788 --- /dev/null +++ b/frontend/elm.json @@ -0,0 +1,27 @@ +{ + "type": "application", + "source-directories": [ + "src" + ], + "elm-version": "0.19.1", + "dependencies": { + "direct": { + "elm/browser": "1.0.2", + "elm/core": "1.0.5", + "elm/html": "1.0.0", + "elm/http": "2.0.0", + "elm/json": "1.1.3", + "elm/url": "1.0.0" + }, + "indirect": { + "elm/bytes": "1.0.8", + "elm/file": "1.0.5", + "elm/time": "1.0.0", + "elm/virtual-dom": "1.0.2" + } + }, + "test-dependencies": { + "direct": {}, + "indirect": {} + } +} diff --git a/frontend/src/Cart.elm b/frontend/src/Cart.elm new file mode 100644 index 0000000..a1750f6 --- /dev/null +++ b/frontend/src/Cart.elm @@ -0,0 +1,164 @@ +module Cart exposing (..) + +import Browser +import Browser.Navigation as Nav +import Html exposing (..) +import Html.Attributes exposing (..) +import Html.Events exposing (..) +import Http +import Json.Decode as D +import Json.Encode as Encode +import Url +import Url.Parser as P exposing ((), Parser, int, oneOf, s, string) + + +type alias Product = + { id : Int + , name : String + , kind : Maybe String + , price : Float + , description : Maybe String + } + + +type alias Model = + { pageStatus : Status + , products : List Product + } + + +type Status + = Loading + | Loaded + | NotLoaded + + +type Msg + = CartLoaded (Result Http.Error (List Product)) + | FetchCartItems + | RemoveFromCart Int + | CartItemRemoved (Result Http.Error ()) + + +init : Model +init = + Model NotLoaded [] + + +update : Msg -> Model -> ( Model, Cmd Msg ) +update msg model = + case msg of + CartLoaded res -> + case res of + Ok s -> + ( { model | products = s, pageStatus = Loaded }, Cmd.none ) + + Err e -> + let + _ = + Debug.log "error" e + in + ( { model | pageStatus = NotLoaded }, Cmd.none ) + + RemoveFromCart id -> + ( model, removeProduct id ) + + CartItemRemoved _ -> + ( { model | pageStatus = Loading }, fetchCartItems ) + + FetchCartItems -> + ( { model | pageStatus = Loading }, fetchCartItems ) + + +decodeProduct : D.Decoder Product +decodeProduct = + D.map5 Product + (D.field "id" D.int) + (D.field "name" D.string) + (D.field "kind" (D.nullable D.string)) + (D.field "price" D.float) + (D.field "description" (D.nullable D.string)) + + +decodeResponse : D.Decoder (List Product) +decodeResponse = + D.list decodeProduct + + +removeProduct : Int -> Cmd Msg +removeProduct id = + let + _ = + Debug.log "cart" "fetching cart items" + in + Http.riskyRequest + { method = "POST" + , headers = [] + , url = "http://127.0.0.1:7878/cart/remove" + , body = Http.stringBody "application/json" <| String.fromInt id + , expect = Http.expectWhatever CartItemRemoved + , timeout = Nothing + , tracker = Nothing + } + + +fetchCartItems : Cmd Msg +fetchCartItems = + let + _ = + Debug.log "cart" "fetching cart items" + in + Http.riskyRequest + { method = "GET" + , headers = [] + , url = "http://127.0.0.1:7878/cart/items" + , body = Http.emptyBody + , expect = Http.expectJson CartLoaded decodeResponse + , timeout = Nothing + , tracker = Nothing + } + + +viewStatus : Status -> String +viewStatus s = + case s of + Loading -> + 
"Loading" + + Loaded -> + "Ready!" + + NotLoaded -> + "Not loaded ..." + + +viewProduct : Product -> Html Msg +viewProduct p = + div [] + [ text p.name + , div [] [ text <| Maybe.withDefault "" p.kind ] + , div [] [ text <| Maybe.withDefault "" p.description ] + , div [] [ text <| String.fromFloat p.price ] + , div [] [ button [ onClick (RemoveFromCart p.id) ] [ text "Remove" ] ] + , div [] [ a [ href ("/product/" ++ String.fromInt p.id) ] [ text "View Product" ] ] + ] + + +view : Model -> Html Msg +view model = + case model.pageStatus of + Loading -> + div [] [ text <| viewStatus Loading ] + + _ -> + div [] + [ let + cart = + List.map viewProduct model.products + in + if List.isEmpty cart then + text "No items in cart" + + else + ul [] cart + ] diff --git a/frontend/src/Catalog.elm b/frontend/src/Catalog.elm new file mode 100644 index 0000000..80e5e38 --- /dev/null +++ b/frontend/src/Catalog.elm @@ -0,0 +1,125 @@ +module Catalog exposing (..) + +import Browser +import Browser.Navigation as Nav +import Html exposing (..) +import Html.Attributes exposing (..) +import Html.Events exposing (..) +import Http +import Json.Decode as D +import Json.Encode as Encode +import Url +import Url.Parser as P exposing ((), Parser, int, oneOf, s, string) + + +type alias Product = + { id : Int + , name : String + , kind : Maybe String + , price : Float + , description : Maybe String + } + + +type alias Model = + { pageStatus : Status + , products : List Product + } + + +type Status + = Loading + | Loaded + | NotLoaded + + +type Msg + = ProductsLoaded (Result Http.Error (List Product)) + | FetchProducts + + +init : Model +init = + Model NotLoaded [] + + +update : Msg -> Model -> ( Model, Cmd Msg ) +update msg model = + case msg of + ProductsLoaded res -> + case res of + Ok s -> + ( { model | products = s, pageStatus = Loaded }, Cmd.none ) + + Err e -> + let + _ = + Debug.log "error" e + in + ( { model | pageStatus = NotLoaded }, Cmd.none ) + + FetchProducts -> + ( { model | pageStatus = Loading }, fetchProducts ) + + +decodeProduct : D.Decoder Product +decodeProduct = + D.map5 Product + (D.field "id" D.int) + (D.field "name" D.string) + (D.field "kind" (D.nullable D.string)) + (D.field "price" D.float) + (D.field "description" (D.nullable D.string)) + + +decodeResponse : D.Decoder (List Product) +decodeResponse = + D.list decodeProduct + + +fetchProducts : Cmd Msg +fetchProducts = + let + _ = + Debug.log "err" "fetching products" + in + Http.get + { url = "http://127.0.0.1:7878/product/catalog" + , expect = Http.expectJson ProductsLoaded decodeResponse + } + + +viewStatus : Status -> String +viewStatus s = + case s of + Loading -> + "Loading" + + Loaded -> + "Ready!" + + NotLoaded -> + "Not loaded ..." + + +viewProduct : Product -> Html Msg +viewProduct p = + div [] + [ text p.name + , text <| Maybe.withDefault "" p.kind + , text <| Maybe.withDefault "" p.description + , text <| String.fromFloat p.price + , a [ href ("/product/" ++ String.fromInt p.id) ] [ text "View Product" ] + ] + + +view : Model -> Html Msg +view model = + case model.pageStatus of + Loading -> + div [] [ text <| viewStatus Loading ] + + _ -> + div [] + [ ul [] (List.map viewProduct model.products) + ] diff --git a/frontend/src/Login.elm b/frontend/src/Login.elm new file mode 100644 index 0000000..dd168f0 --- /dev/null +++ b/frontend/src/Login.elm @@ -0,0 +1,119 @@ +module Login exposing (..) + +import Browser +import Browser.Navigation as Nav +import Html exposing (..) +import Html.Attributes exposing (..) 
+import Html.Events exposing (..) +import Http +import Json.Encode as Encode +import Url +import Url.Parser as P exposing ((), Parser, int, oneOf, s, string) + + +type alias Model = + { username : String + , password : String + , loginStatus : LoginStatus + } + + +type LoginStatus + = NotLoggedIn + | LoggedIn + | InvalidLogin + | LoggingIn + + +type Msg + = PassEntered String + | UserEntered String + | LoginPressed + | LoginSuccess (Result Http.Error ()) + | LoginFail + + +init : Model +init = + Model "" "" NotLoggedIn + + +update : Msg -> Model -> ( Model, Cmd Msg ) +update msg model = + case msg of + PassEntered s -> + ( { model | password = s } + , Cmd.none + ) + + UserEntered s -> + ( { model | username = s } + , Cmd.none + ) + + LoginPressed -> + ( { model | loginStatus = LoggingIn }, tryLogin model ) + + LoginSuccess res -> + case res of + Ok s -> + ( { model | loginStatus = LoggedIn }, Cmd.none ) + + Err e -> + ( { model | loginStatus = InvalidLogin }, Cmd.none ) + + LoginFail -> + ( { model | loginStatus = InvalidLogin }, Cmd.none ) + + +encodeLogin : Model -> Encode.Value +encodeLogin model = + Encode.object + [ ( "username", Encode.string model.username ) + , ( "password", Encode.string model.password ) + ] + + +tryLogin : Model -> Cmd Msg +tryLogin model = + Http.riskyRequest + { method = "POST" + , headers = [] + , url = "http://127.0.0.1:7878/user/login" + , body = model |> encodeLogin |> Http.jsonBody + , expect = Http.expectWhatever LoginSuccess + , timeout = Nothing + , tracker = Nothing + } + + +viewStatus : LoginStatus -> String +viewStatus ls = + case ls of + NotLoggedIn -> + "Not Logged In" + + InvalidLogin -> + "Invalid Login" + + LoggedIn -> + "Logged in!" + + LoggingIn -> + "Logging In ..." + + +viewInput : String -> String -> String -> (String -> msg) -> Html msg +viewInput t p v toMsg = + input [ type_ t, placeholder p, value v, onInput toMsg ] [] + + +view : Model -> Html Msg +view model = + div [] + [ div [] [ viewInput "text" "Enter name here" model.username UserEntered ] + , div [] [ viewInput "password" "Password" model.password PassEntered ] + , div [] [ button [ onClick LoginPressed ] [ text "Login" ] ] + , div [] [ text (viewStatus model.loginStatus) ] + , div [] [ text "Don't have an account? ", a [ href "/signup" ] [ text "Register now!" ] ] + ] diff --git a/frontend/src/Main.elm b/frontend/src/Main.elm new file mode 100644 index 0000000..bf1583c --- /dev/null +++ b/frontend/src/Main.elm @@ -0,0 +1,339 @@ +module Main exposing (Model, Msg(..), init, main, subscriptions, update, view, viewLink) + +import Browser +import Browser.Navigation as Nav +import Cart +import Catalog +import Html exposing (..) +import Html.Attributes exposing (..) +import Html.Events exposing (..) 
+import Http +import Json.Encode as Encode +import Login +import Product +import Signup +import Url +import Url.Parser as P exposing ((), Parser, int, oneOf, s, string) + + + +-- MAIN + + +main : Program () Model Msg +main = + Browser.application + { init = init + , view = view + , update = update + , subscriptions = subscriptions + , onUrlChange = UrlChanged + , onUrlRequest = LinkClicked + } + + + +-- MODEL + + +type Route + = LoginPage + | SignupPage + | HomePage + | CatalogPage + | CartPage + | ProductPage Int + | NotFoundPage + + +parseRoute : Parser (Route -> a) a +parseRoute = + oneOf + [ P.map LoginPage (P.s "login") + , P.map HomePage P.top + , P.map CatalogPage (P.s "catalog") + , P.map CartPage (P.s "cart") + , P.map SignupPage (P.s "signup") + , P.map ProductPage (P.s "product" P.int) + + --, P.map ProductPage (P.s "product" int) + ] + + +type alias Model = + { key : Nav.Key + , url : Url.Url + , location : Route + , loginModel : Login.Model + , catalogModel : Catalog.Model + , productModel : Product.Model + , signupModel : Signup.Model + , cartModel : Cart.Model + } + + +init : () -> Url.Url -> Nav.Key -> ( Model, Cmd Msg ) +init flags url key = + let + start = + HomePage + + login = + Login.init + + catalog = + Catalog.init + + product = + Product.init + + signup = + Signup.init + + cart = + Cart.init + in + ( Model key url start login catalog product signup cart, Cmd.none ) + + + +-- UPDATE + + +type Msg + = LinkClicked Browser.UrlRequest + | UrlChanged Url.Url + | LoginMessage Login.Msg + | CatalogMessage Catalog.Msg + | ProductMessage Product.Msg + | SignupMessage Signup.Msg + | CartMessage Cart.Msg + | LogoutPressed + | LogoutSuccess (Result Http.Error ()) + + +update : Msg -> Model -> ( Model, Cmd Msg ) +update msg model = + case msg of + LinkClicked urlRequest -> + case urlRequest of + Browser.Internal url -> + ( model, Nav.pushUrl model.key (Url.toString url) ) + + Browser.External href -> + ( model, Nav.load href ) + + LogoutPressed -> + ( model, tryLogout ) + + LogoutSuccess _ -> + ( model, Nav.replaceUrl model.key "/login" ) + + UrlChanged url -> + let + parsedUrl = + P.parse parseRoute url + in + case parsedUrl of + Just CatalogPage -> + ( { model | location = CatalogPage }, Cmd.map CatalogMessage Catalog.fetchProducts ) + + Just (ProductPage id) -> + let + cmds = + List.map (Cmd.map ProductMessage) + [ Product.fetchListing id + , Product.fetchRatings id + ] + in + ( { model | location = ProductPage id }, Cmd.batch cmds ) + + Just CartPage -> + let + cmd = + Cmd.map CartMessage Cart.fetchCartItems + in + ( { model | location = CartPage }, cmd ) + + Just p -> + ( { model | location = p }, Cmd.none ) + + Nothing -> + ( { model | location = NotFoundPage }, Cmd.none ) + + LoginMessage lm -> + let + ( lmn, cmd ) = + Login.update lm model.loginModel + + redir = + case lmn.loginStatus of + Login.LoggedIn -> + Nav.replaceUrl model.key "/catalog" + + _ -> + Cmd.none + in + ( { model | loginModel = lmn }, Cmd.batch [ Cmd.map LoginMessage cmd, redir ] ) + + SignupMessage sm -> + let + ( smn, cmd ) = + Signup.update sm model.signupModel + + redir = + case smn.status of + Signup.CreatedSuccessfully -> + Nav.replaceUrl model.key "/login" + + _ -> + Cmd.none + in + ( { model | signupModel = smn }, Cmd.batch [ Cmd.map SignupMessage cmd, redir ] ) + + CatalogMessage cm -> + let + ( cmn, cmd ) = + Catalog.update cm model.catalogModel + in + ( { model | catalogModel = cmn }, Cmd.map CatalogMessage cmd ) + + CartMessage cm -> + let + ( cmn, cmd ) = + Cart.update cm model.cartModel + 
in + ( { model | cartModel = cmn }, Cmd.map CartMessage cmd ) + + ProductMessage pm -> + let + ( pmn, cmd ) = + Product.update pm model.productModel + + redir = + case pm of + Product.AddToCartSuccess _ -> + Nav.replaceUrl model.key "/cart" + + _ -> + Cmd.none + in + ( { model | productModel = pmn }, Cmd.batch [ Cmd.map ProductMessage cmd, redir ] ) + + +tryLogout : Cmd Msg +tryLogout = + Http.riskyRequest + { method = "POST" + , headers = [] + , url = "http://127.0.0.1:7878/user/logout" + , body = Http.emptyBody + , expect = Http.expectWhatever LogoutSuccess + , timeout = Nothing + , tracker = Nothing + } + + + +-- SUBSCRIPTIONS + + +subscriptions : Model -> Sub Msg +subscriptions _ = + Sub.none + + + +-- VIEW + + +view : Model -> Browser.Document Msg +view model = + case model.location of + LoginPage -> + { title = "Login" + , body = [ Html.map LoginMessage (Login.view model.loginModel) ] + } + + SignupPage -> + { title = "Signup" + , body = [ Html.map SignupMessage (Signup.view model.signupModel) ] + } + + HomePage -> + { title = "URL Interceptor" + , body = + [ text "The current URL is: " + , b [] [ text (Url.toString model.url) ] + , ul [] + [ viewLink "/login" + , viewLink "/catalog" + , viewLink "/cart" + , viewLink "/signup" + ] + ] + } + + NotFoundPage -> + { title = "404 - Not Found" + , body = + [ text "404 - Not Found" + , a [ href "/" ] [ text "Go back >" ] + ] + } + + CatalogPage -> + { title = "Catalog" + , body = pageWrap model (Html.map CatalogMessage (Catalog.view model.catalogModel)) + } + + CartPage -> + { title = "Cart" + , body = pageWrap model (Html.map CartMessage (Cart.view model.cartModel)) + } + + ProductPage item -> + { title = "Product " ++ String.fromInt item + , body = pageWrap model (Html.map ProductMessage (Product.view model.productModel)) + } + + +viewHeader : Model -> Html Msg +viewHeader model = + let + links = + [ ( "Home", "/" ) + , ( "Catalog", "/catalog" ) + , ( "Cart", "/cart" ) + ] + in + div [] + [ List.map + (\( name, loc ) -> + li [] + [ a [ href loc ] [ text name ] + ] + ) + links + ++ [ if model.loginModel.loginStatus /= Login.LoggedIn then + li [] [ a [ href "/login" ] [ text "Login" ] ] + + else + button [ onClick LogoutPressed ] [ text "Logout" ] + ] + |> ul [] + ] + + +pageWrap : Model -> Html Msg -> List (Html Msg) +pageWrap model page = + [ div [] + [ viewHeader model + , page + ] + ] + + +viewLink : String -> Html msg +viewLink path = + li [] [ a [ href path ] [ text path ] ] diff --git a/frontend/src/Product.elm b/frontend/src/Product.elm new file mode 100644 index 0000000..0ea0ce1 --- /dev/null +++ b/frontend/src/Product.elm @@ -0,0 +1,302 @@ +module Product exposing (..) + +import Browser +import Browser.Navigation as Nav +import Html exposing (..) +import Html.Attributes exposing (..) +import Html.Events exposing (..) 
+import Http +import Json.Decode as D +import Json.Encode as Encode +import Url +import Url.Parser as P exposing ((), Parser, int, oneOf, s, string) + + +type SubmitStatus + = SubmitSuccess + | SubmitFail + | Submitting + | NotSubmitted + + +type alias Product = + { id : Int + , name : String + , kind : Maybe String + , price : Float + , description : Maybe String + } + + +emptyProduct = + Product -1 "" Nothing 0 Nothing + + +type alias Rating = + { commentDate : String + , commentText : Maybe String + , customerName : String + , productName : String + , stars : Int + } + + +type alias Model = + { pageStatus : Status + , listing : Product + , ratings : List Rating + , ratingStars : Int + , ratingText : String + , addRatingStatus : SubmitStatus + } + + +type Status + = Loading + | Loaded + | NotLoaded + + +type Msg + = ListingLoaded (Result Http.Error Product) + | RatingsLoaded (Result Http.Error (List Rating)) + | FetchProduct Int + | FetchRatings Int + | AddRatingStars Int + | AddRatingComment String + | AddRatingPressed + | AddRatingSuccess (Result Http.Error ()) + | AddRatingFail + | AddToCartSuccess (Result Http.Error ()) + | AddToCartPressed + + +init : Model +init = + Model NotLoaded emptyProduct [] 0 "" NotSubmitted + + +update : Msg -> Model -> ( Model, Cmd Msg ) +update msg model = + case msg of + ListingLoaded res -> + case res of + Ok s -> + ( { model | listing = s, pageStatus = Loaded }, Cmd.none ) + + Err e -> + let + _ = + Debug.log "error" e + in + ( { model | pageStatus = NotLoaded }, Cmd.none ) + + RatingsLoaded res -> + case res of + Ok s -> + ( { model | ratings = s, pageStatus = Loaded }, Cmd.none ) + + Err e -> + let + _ = + Debug.log "error" e + in + ( { model | pageStatus = NotLoaded }, Cmd.none ) + + FetchProduct id -> + ( { model | pageStatus = Loading }, fetchListing id ) + + FetchRatings id -> + ( { model | pageStatus = Loading }, fetchRatings id ) + + AddRatingStars i -> + ( { model | ratingStars = i }, Cmd.none ) + + AddRatingComment s -> + ( { model | ratingText = s }, Cmd.none ) + + AddRatingPressed -> + ( { model | addRatingStatus = Submitting } + , submitRating model + ) + + AddRatingSuccess res -> + case res of + Ok _ -> + ( { model | addRatingStatus = SubmitSuccess }, fetchRatings model.listing.id ) + + Err _ -> + ( { model | addRatingStatus = SubmitFail }, Cmd.none ) + + AddRatingFail -> + ( { model | addRatingStatus = SubmitFail }, Cmd.none ) + + AddToCartPressed -> + ( model, addToCart model ) + + AddToCartSuccess _ -> + ( model, Cmd.none ) + + +decodeProduct : D.Decoder Product +decodeProduct = + D.map5 Product + (D.field "id" D.int) + (D.field "name" D.string) + (D.field "kind" (D.nullable D.string)) + (D.field "price" D.float) + (D.field "description" (D.nullable D.string)) + + +decodeRating : D.Decoder Rating +decodeRating = + D.map5 Rating + (D.field "comment_date" D.string) + (D.field "comment_text" (D.nullable D.string)) + (D.field "customer_name" D.string) + (D.field "product_name" D.string) + (D.field "stars" D.int) + + +decodeRatings : D.Decoder (List Rating) +decodeRatings = + D.list decodeRating + + +fetchListing : Int -> Cmd Msg +fetchListing id = + let + _ = + Debug.log "err" <| "fetching listing " ++ String.fromInt id + in + Http.get + { url = "http://127.0.0.1:7878/product/" ++ String.fromInt id + , expect = Http.expectJson ListingLoaded decodeProduct + } + + +fetchRatings : Int -> Cmd Msg +fetchRatings id = + let + _ = + Debug.log "err" <| "fetching ratings " ++ String.fromInt id + in + Http.get + { url = 
"http://127.0.0.1:7878/product/reviews/" ++ String.fromInt id + , expect = Http.expectJson RatingsLoaded decodeRatings + } + + +encodeRatingForm : Model -> Encode.Value +encodeRatingForm model = + Encode.object + [ ( "product_id", Encode.int model.listing.id ) + , ( "stars", Encode.int model.ratingStars ) + , ( "comment_text", Encode.string model.ratingText ) + ] + + +submitRating : Model -> Cmd Msg +submitRating model = + let + _ = + Debug.log "err" <| "submitting rating for" ++ String.fromInt model.listing.id + in + Http.riskyRequest + { method = "POST" + , headers = [] + , url = "http://127.0.0.1:7878/rating/add" + , body = model |> encodeRatingForm |> Http.jsonBody + , expect = Http.expectWhatever AddRatingSuccess + , timeout = Nothing + , tracker = Nothing + } + + +addToCart : Model -> Cmd Msg +addToCart model = + let + _ = + Debug.log "err" <| "adding to cart: " ++ String.fromInt model.listing.id + in + Http.riskyRequest + { method = "POST" + , headers = [] + , url = "http://127.0.0.1:7878/cart/add" + , body = Http.stringBody "applcation/json" <| String.fromInt <| model.listing.id + , expect = Http.expectWhatever AddToCartSuccess + , timeout = Nothing + , tracker = Nothing + } + + +viewStatus : Status -> String +viewStatus s = + case s of + Loading -> + "Loading" + + Loaded -> + "Ready!" + + NotLoaded -> + "Not loaded ..." + + +viewProduct : Product -> Html Msg +viewProduct p = + div [] + [ text p.name + , text <| Maybe.withDefault "" p.kind + , text <| Maybe.withDefault "" p.description + , text <| String.fromFloat p.price + ] + + +viewRating : Rating -> Html Msg +viewRating r = + div [] + [ text <| r.customerName ++ " posted on " + , text <| r.commentDate ++ " " + , text <| Maybe.withDefault "" r.commentText + , text <| " Stars: " ++ String.fromInt r.stars + ] + + +viewInput : String -> String -> String -> (String -> msg) -> Html msg +viewInput t p v toMsg = + input [ type_ t, placeholder p, value v, onInput toMsg ] [] + + +viewStars : Html Msg +viewStars = + ul [] + (List.map + (\i -> button [ onClick (AddRatingStars i) ] [ text <| String.fromInt i ]) + [ 0, 1, 2, 3, 4, 5 ] + ) + + +view : Model -> Html Msg +view model = + case model.pageStatus of + Loading -> + div [] [ text <| viewStatus Loading ] + + _ -> + div [] + [ div [] [ viewProduct model.listing ] + , ul [] (List.map viewRating model.ratings) + , div [] [ text "Add Rating: " ] + , div [] + [ viewStars + , viewInput "text" "Enter Comment Text" model.ratingText AddRatingComment + , button [ onClick AddRatingPressed ] [ text "Submit Rating" ] + ] + , div [] + [ button [ onClick AddToCartPressed ] [ text "Add To Cart" ] + ] + , div [] + [ a [ href "/catalog" ] [ text "Back to catalog" ] + ] + ] diff --git a/frontend/src/Signup.elm b/frontend/src/Signup.elm new file mode 100644 index 0000000..6395b57 --- /dev/null +++ b/frontend/src/Signup.elm @@ -0,0 +1,194 @@ +module Signup exposing (..) + +import Browser +import Browser.Navigation as Nav +import Html exposing (..) +import Html.Attributes exposing (..) +import Html.Events exposing (..) 
+import Http
+import Json.Encode as Encode
+import Url
+import Url.Parser as P exposing ((</>), Parser, int, oneOf, s, string)
+
+
+type alias Model =
+    { username : String
+    , password : String
+    , phoneNumber : String
+    , emailId : String
+    , address : Maybe String
+    , status : Status
+    }
+
+
+type Status
+    = UsernameTaken
+    | InvalidPhone
+    | InvalidEmail
+    | CreatedSuccessfully
+    | CreatingUser
+    | Empty
+
+
+type Msg
+    = UserEntered String
+    | PassEntered String
+    | PhoneEntered String
+    | EmailEntered String
+    | AddressEntered String
+    | CreatePressed
+    | CreationSuccess (Result Http.Error ())
+    | UsernameExists (Result Http.Error String)
+    | CreationFail
+
+
+init : Model
+init =
+    Model "" "" "" "" Nothing Empty
+
+
+update : Msg -> Model -> ( Model, Cmd Msg )
+update msg model =
+    case msg of
+        UserEntered s ->
+            ( { model | username = s }
+            , Cmd.none
+            )
+
+        PassEntered s ->
+            ( { model | password = s }
+            , Cmd.none
+            )
+
+        PhoneEntered s ->
+            let
+                status =
+                    if String.length s /= 10 || (List.any (not << Char.isDigit) <| String.toList s) then
+                        InvalidPhone
+
+                    else
+                        Empty
+            in
+            ( { model | phoneNumber = s, status = status }
+            , Cmd.none
+            )
+
+        EmailEntered s ->
+            let
+                status =
+                    if not <| String.contains "@" s then
+                        InvalidEmail
+
+                    else
+                        Empty
+            in
+            ( { model | emailId = s, status = status }
+            , Cmd.none
+            )
+
+        AddressEntered s ->
+            ( { model | address = Just s }
+            , Cmd.none
+            )
+
+        CreatePressed ->
+            ( { model | status = CreatingUser }, checkExists model )
+
+        CreationSuccess res ->
+            case res of
+                Ok _ ->
+                    ( { model | status = CreatedSuccessfully }, Cmd.none )
+
+                Err _ ->
+                    ( model, Cmd.none )
+
+        CreationFail ->
+            ( init, Cmd.none )
+
+        UsernameExists res ->
+            case res of
+                Ok "true" ->
+                    ( { model | status = UsernameTaken }, Cmd.none )
+
+                Ok "false" ->
+                    let
+                        _ =
+                            Debug.log "signup" "Hit create user ..."
+                    in
+                    ( { model | status = CreatingUser }, createUser model )
+
+                _ ->
+                    ( model, Cmd.none )
+
+
+encodeCreateUser : Model -> Encode.Value
+encodeCreateUser model =
+    Encode.object
+        [ ( "username", Encode.string model.username )
+        , ( "password", Encode.string model.password )
+        , ( "phone_number", Encode.string model.phoneNumber )
+        , ( "email_id", Encode.string model.emailId )
+        , ( "address", Encode.string <| Maybe.withDefault "" model.address )
+        ]
+
+
+checkExists : Model -> Cmd Msg
+checkExists model =
+    Http.post
+        { url = "http://127.0.0.1:7878/user/existing"
+        , body = Http.stringBody "application/json" model.username
+        , expect = Http.expectString UsernameExists
+        }
+
+
+createUser : Model -> Cmd Msg
+createUser model =
+    Http.riskyRequest
+        { method = "POST"
+        , headers = []
+        , url = "http://127.0.0.1:7878/user/new"
+        , body = model |> encodeCreateUser |> Http.jsonBody
+        , expect = Http.expectWhatever CreationSuccess
+        , timeout = Nothing
+        , tracker = Nothing
+        }
+
+
+viewStatus : Status -> String
+viewStatus s =
+    case s of
+        UsernameTaken ->
+            "This username is taken!"
+
+        InvalidPhone ->
+            "Invalid phone number!"
+
+        InvalidEmail ->
+            "Invalid email address!"
+
+        CreatedSuccessfully ->
+            "User created successfully"
+
+        CreatingUser ->
+            "Creating user ..."
+
+        Empty ->
+            ""
+
+
+viewInput : String -> String -> String -> (String -> msg) -> Html msg
+viewInput t p v toMsg =
+    input [ type_ t, placeholder p, value v, onInput toMsg ] []
+
+
+view : Model -> Html Msg
+view model =
+    div []
+        [ viewInput "text" "Enter Username" model.username UserEntered
+        , viewInput "password" "Password" model.password PassEntered
+        , viewInput "text" "Email" model.emailId EmailEntered
+        , viewInput "text" "Enter your Phone number" model.phoneNumber PhoneEntered
+        , viewInput "text" "Enter Shipping address" (Maybe.withDefault "" model.address) AddressEntered
+        , button [ onClick CreatePressed ] [ text "Create" ]
+        , text (viewStatus model.status)
+        ]