Compare commits

27 Commits (author and date columns were not captured):

- f9ecc5ce6f
- 8ee4e436aa
- 064976ed6e
- 45a1520a2b
- f7f3ac5dc7
- 15454e3686
- 63012131d4
- a013e6fe81
- 9d8f3368ec
- 88a1f181cb
- a6227a80db
- d1a47e2ac6
- 86dc084e7d
- d3a9aa2f00
- 1820988894
- daeeffa995
- 1e72dbcfc2
- a5b9280a46
- 4b9c45dd5f
- 529aa80c47
- 66e286f267
- cb25dba7de
- 9b99b35436
- 0c6e9e1228
- faa28749bc
- fb246df01a
- 0b153d7990
CI workflow:

@@ -2,7 +2,7 @@
 name: CI
 on:
   push:
-    branches: ["main", "dev"]
+    branches: ["main"]
   pull_request:
 
 jobs:
.gitea/workflows/docker.yaml (new file, 55 lines):

@@ -0,0 +1,55 @@
+name: Build docker images
+
+on:
+  push:
+    branches:
+      - main
+  pull_request: {}
+  workflow_dispatch: {}
+
+jobs:
+  docker:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v4
+        with:
+          lfs: true
+
+      - name: Setup QEMU
+        uses: docker/setup-qemu-action@v3
+
+      - name: Setup docker buildx
+        uses: docker/setup-buildx-action@v3
+
+      - name: Login to Container Registery
+        uses: docker/login-action@v3
+        with:
+          registry: git.housh.dev
+          username: ${{ secrets.DOCKER_USERNAME }}
+          password: ${{ secrets.DOCKER_PASSWORD }}
+
+      - name: Extract metadata for Docker
+        id: meta
+        uses: docker/metadata-action@v5
+        with:
+          images: git.housh.dev/michael/swift-hpa
+          tags: |
+            type=schedule
+            type=ref,event=branch
+            type=ref,event=pr
+            type=semver,pattern={{version}}
+            type=semver,pattern={{major}}.{{minor}}
+            type=semver,pattern={{major}}
+            type=sha
+            type=raw,value=latest
+
+      - name: Build and push Docker image
+        uses: docker/build-push-action@v6
+        with:
+          context: .
+          file: ./docker/Dockerfile
+          platforms: linux/arm64
+          push: true
+          tags: ${{ steps.meta.outputs.tags }}
+          labels: ${{ steps.meta.outputs.labels }}
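Once this workflow runs, the image it pushes can be pulled straight from the registry it logs in to. A minimal sketch — the `latest` tag comes from the `type=raw,value=latest` rule above, while the assumption that the container's entrypoint is the `hpa` binary is mine, not something this diff states:

```bash
# Pull the arm64 image pushed by the workflow above and show the CLI help.
docker pull git.housh.dev/michael/swift-hpa:latest
docker run -it --rm git.housh.dev/michael/swift-hpa:latest --help
```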
.gitea/workflows/release.yaml (new file, 17 lines):

@@ -0,0 +1,17 @@
+---
+name: Release
+
+on:
+  push:
+    tags:
+      - "*.*.*"
+      - "v*.*.*"
+
+jobs:
+  release:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v4
+      - name: Release
+        uses: softprops/action-gh-release@v2
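This workflow fires on version tags, so cutting a release is just pushing a tag that matches one of the patterns above. A minimal sketch, using the 0.1.5 version that appears later in this diff:

```bash
# Tag the release commit and push the tag; the Release workflow runs on the tag push.
git tag 0.1.5
git push origin 0.1.5
```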
Removed workflow ("Create and publish a Docker image", 68 lines deleted):

@@ -1,68 +0,0 @@
-#
-name: Create and publish a Docker image
-
-# Configures this workflow to run every time a change is pushed to the branch called `release`.
-on:
-  push:
-    branches: ['release']
-    tags:
-      - '*'
-  workflow_dispatch:
-
-# Defines two custom environment variables for the workflow. These are used for the Container registry domain, and a name for the Docker image that this workflow builds.
-env:
-  REGISTRY: git.housh.dev
-  IMAGE_NAME: ${{ gitea.repository }}
-
-# There is a single job in this workflow. It's configured to run on the latest available version of Ubuntu.
-jobs:
-  build-and-push-image:
-    runs-on: ubuntu-latest
-    # Sets the permissions granted to the `GITHUB_TOKEN` for the actions in this job.
-    permissions:
-      contents: read
-      packages: write
-      attestations: write
-      id-token: write
-    #
-    steps:
-      - name: Checkout repository
-        uses: actions/checkout@v4
-      # Uses the `docker/login-action` action to log in to the Container registry registry using the account and password that will publish the packages. Once published, the packages are scoped to the account defined here.
-      - name: Log in to the Container registry
-        uses: docker/login-action@65b78e6e13532edd9afa3aa52ac7964289d1a9c1
-        with:
-          registry: ${{ env.REGISTRY }}
-          username: ${{ gitea.actor }}
-          password: ${{ secrets.CONTAINER_TOKEN }}
-      # This step uses [docker/metadata-action](https://github.com/docker/metadata-action#about) to extract tags and labels that will be applied to the specified image. The `id` "meta" allows the output of this step to be referenced in a subsequent step. The `images` value provides the base name for the tags and labels.
-      - name: Extract metadata (tags, labels) for Docker
-        id: meta
-        uses: docker/metadata-action@9ec57ed1fcdbf14dcef7dfbe97b2010124a938b7
-        with:
-          images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
-          tags: |
-            type=ref,event=branch
-            type=semver,pattern={{version}}
-            type=sha
-      # This step uses the `docker/build-push-action` action to build the image, based on your repository's `Dockerfile`. If the build succeeds, it pushes the image to GitHub Packages.
-      # It uses the `context` parameter to define the build's context as the set of files located in the specified path. For more information, see "[Usage](https://github.com/docker/build-push-action#usage)" in the README of the `docker/build-push-action` repository.
-      # It uses the `tags` and `labels` parameters to tag and label the image with the output from the "meta" step.
-      - name: Build and push Docker image
-        id: push
-        uses: docker/build-push-action@f2a1d5e99d037542a71f64918e516c093c6f3fc4
-        with:
-          context: .
-          file: docker/Dockerfile
-          push: true
-          tags: ${{ steps.meta.outputs.tags }}
-          labels: ${{ steps.meta.outputs.labels }}
-
-      # This step generates an artifact attestation for the image, which is an unforgeable statement about where and how it was built. It increases supply chain security for people who consume the image. For more information, see "[AUTOTITLE](/actions/security-guides/using-artifact-attestations-to-establish-provenance-for-builds)."
-      # - name: Generate artifact attestation
-      #   uses: actions/attest-build-provenance@v1
-      #   with:
-      #     subject-name: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME}}
-      #     subject-digest: ${{ steps.push.outputs.digest }}
-      #     push-to-registry: true
-      #     github-token: ${{ secrets.CONTAINER_TOKEN }}
.gitignore (vendored):

@@ -10,3 +10,5 @@ DerivedData/
 .swiftpm/*
 ./hpa.toml
 ./Version.*
+/*.json
+hpa
LICENSE (new file, 20 lines):

@@ -0,0 +1,20 @@
+The MIT License (MIT)
+
+Copyright (c) 2024 Michael Housh
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software is furnished to do so,
+subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
+FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
+COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
+IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
Package.resolved:

@@ -1,5 +1,5 @@
 {
-  "originHash" : "bc31b11e5e7d488e0a9c1bf91cb572d29f782bfd8e43f44157036f8f3d282893",
+  "originHash" : "f1d1e27e3b3b21d41b872325e0174196a323dc438bf3c9d9f99858856b457c96",
   "pins" : [
     {
       "identity" : "combine-schedulers",
@@ -33,8 +33,8 @@
       "kind" : "remoteSourceControl",
       "location" : "https://git.housh.dev/michael/swift-cli-doc.git",
       "state" : {
-        "revision" : "e524056dc65c5ce7a6a77bdea4e5fa0bf724019b",
-        "version" : "0.2.0"
+        "revision" : "bbace73d974fd3e6985461431692bea773c7c5d8",
+        "version" : "0.2.1"
       }
     },
     {
Package.swift:

@@ -20,7 +20,7 @@ let package = Package(
     .package(url: "https://github.com/pointfreeco/swift-custom-dump.git", from: "1.3.3"),
     .package(url: "https://github.com/pointfreeco/swift-dependencies", from: "1.5.2"),
     .package(url: "https://github.com/m-housh/swift-shell-client.git", from: "0.1.0"),
-    .package(url: "https://git.housh.dev/michael/swift-cli-doc.git", from: "0.2.0"),
+    .package(url: "https://git.housh.dev/michael/swift-cli-doc.git", from: "0.2.1"),
     .package(url: "https://github.com/m-housh/swift-cli-version.git", from: "0.1.0"),
     .package(url: "https://github.com/LebJe/TOMLKit.git", from: "0.5.0")
   ],
@@ -70,6 +70,7 @@ let package = Package(
       name: "ConfigurationClient",
       dependencies: [
         "CodersClient",
+        "CommandClient",
         "FileClient",
         .product(name: "Dependencies", package: "swift-dependencies"),
         .product(name: "DependenciesMacros", package: "swift-dependencies"),
@@ -114,8 +115,8 @@ let package = Package(
     .target(
       name: "PlaybookClient",
      dependencies: [
-        "CodersClient",
         "CommandClient",
+        "CodersClient",
         "ConfigurationClient",
         "FileClient",
         .product(name: "Dependencies", package: "swift-dependencies"),
README.md (new file, 179 lines):

@@ -0,0 +1,179 @@
+# swift-hpa
+
+A command-line application for managing home performance assessment projects from user-defined
+template repositories.
+
+This tool is a wrapper around several other command line applications, the primary ones being:
+
+1. `ansible-playbook`
+1. `ansible-vault`
+1. `pandoc`
+
+## Installation
+
+You can install the application using homebrew.
+
+```bash
+brew tap michael/formula https://git.housh.dev/michael/homebrew-formula
+brew install michael/formula/hpa
+```
+
+Installation on platforms other than `macOS` is currently being worked on, along with support for
+running in a `docker` container.
+
+### Ensuring dependencies are installed
+
+This application requires some dependencies to be installed on your system. You can install the
+dependencies with the following command.
+
+```bash
+hpa utils install-dependencies
+```
+
+The dependencies installed are:
+
+1. ansible
+1. imagemagick
+1. pandoc
+1. texLive
+
+It will also download an ansible-playbook that is used to generate output files, template
+repositories, and encrypt / decrypt variable files. The playbook gets installed to
+`~/.local/share/hpa/playbook`.
+
+> NOTE: All commands accept a `--help` option which will display the arguments and options a command
+> can use, along with example usage of the commands.
+
+### Configure the application
+
+When you first download the application you can set up the configuration file for your use case.
+
+```bash
+hpa utils generate-config
+```
+
+This will create a configuration file in the default location: `~/.config/hpa/config.toml`, which
+can be edited to suit your needs.
+
+## Getting Started
+
+The first step to getting started is creating your template. This is used to create projects. The
+template defines the structure of a project and defines variables which are used to generate the
+final output files of a project.
+
+You can generate the template using the following command:
+
+```bash
+hpa utils generate-template --path ~/projects/my-template
+```
+
+Where the `--path` is where you would like the template to be on your local system.
+
+It is recommended that after you get your template set up to your liking you turn it into a
+`git` repository, so that your projects can be pinned to a specific version of the template. This
+allows your template to evolve over time.
+
+Once your template is set up, make sure that your configuration file is set up to point to your
+customized template.
+
+## Creating a project
+
+The first step after having your template defined is to create a project that uses it. The below
+command will create a project in the `~/consults/my-first-consult` directory.
+
+```bash
+hpa create ~/consults/my-first-consult
+```
+
+The above assumes that your template is a `git` repository and that your configuration is set up
+properly. If you want to experiment with a local template that is on your system, then you
+can use one of the following command options.
+
+```bash
+hpa create --template-dir ~/projects/my-template ~/consults/my-first-consult
+```
+
+Or, if your configuration has `directory` set in the `template` section:
+
+```bash
+hpa create --use-local-template ~/consults/my-first-consult
+```
+
+## Generating output files
+
+Once you have created a project and edited the contents to your liking, you can generate the
+final output file (typically a pdf) that can be sent to your customer.
+
+```bash
+hpa generate pdf
+```
+
+The above _assumes_ that you are inside your project directory. If you would like to generate an
+output file from outside of your project directory, you can specify the path to the project you would
+like to generate output for.
+
+```bash
+hpa generate pdf --project-directory ~/consults/my-first-consult
+```
+
+Currently the supported output file types are:
+
+1. PDF
+1. LaTeX
+1. HTML
+
+## Build command
+
+The command line tool goes through an intermediate step when generating output, which is called
+`build`. The build step generates the final output files using defined variables that are located in
+your project directory or in your template directory. It will decrypt any sensitive data stored in
+`vault` files as well.
+
+These files get placed inside a directory in the project, whose default location is `.build`. The generate
+commands build the project for you by default, unless you specify the `--no-build` option.
+
+You can explore the contents of the `.build` directory, or, if you'd like to separate the build and
+generate steps, you can build a project using the following command:
+
+```bash
+hpa build
+```
+
+The above _assumes_ that you are inside your project directory. If you would like to build a
+project from outside of its directory, you can specify the path to the project you would
+like to build.
+
+```bash
+hpa build --project-directory ~/consults/my-first-consult
+```
+
+## Some General Usage Notes
+
+There is often a lot of output to the console when running commands, which can be problematic if you
+want to pipe the output into other command line tools, so all commands accept a `-q | --quiet` flag
+which will suppress logging output and allow piping into other commands.
+
+Along similar lines, if you would like to increase the logging output, all commands accept a
+`-v | --verbose` flag that will increase the logging output. This can be passed multiple times, so for
+the highest log output you can do `-vvv`.
+
+## Uninstalling
+
+You can uninstall the application using:
+
+```bash
+brew uninstall hpa
+```
+
+Also remove the configuration and playbook directories.
+
+```bash
+rm -rf ~/.config/hpa
+rm -rf ~/.local/share/hpa
+```
+
+## LICENSE
+
+This project is licensed under the `MIT` license.
+
+[See license](https://git.housh.dev/michael/swift-hpa/LICENSE)
Release.md (new file, 24 lines):

@@ -0,0 +1,24 @@
+# Release Workflow Steps
+
+This is a reminder of the steps used to create a release and update the homebrew formula.
+
+> Note: These steps apply to the version hosted on `gitea`; on `github` more of these steps can be
+> automated in `ci`, but there are currently no `macOS` host runners in `gitea`, so the bottles need
+> to be built on `macOS`.
+
+1. Update the version in `Sources/hpa/Version.swift`.
+1. Tag the commit with the next version tag.
+1. Push the tagged commit; this will initiate creation of the release.
+1. Get the `sha` of the `*.tar.gz` in the release.
+1. `just get-release-sha`
+1. Update the homebrew formula url, sha256, and version at the top of the homebrew formula.
+1. `cd $(brew --repo michael/formula)`
+1. Build and generate a homebrew bottle.
+1. `just bottle`
+1. Update the `bottle do` section of the homebrew formula with the output from the previous step.
+1. Also make sure the `root_url` in the bottle section points to the new release.
+1. Upload the bottle `*.tar.gz` file that was created to the release.
+1. Generate a pull-request to the formula repo.
+1. Generate a pull-request to this repo to merge into main.
+1. Remove the bottle from the current directory.
+1. `just remove-bottles`
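Condensed into shell form, the command-line portion of the steps above looks roughly like the sketch below. The `just` recipe names are taken straight from the list; the justfile that defines them is not part of this diff, so treat it as assumed:

```bash
# Manual release helpers referenced in Release.md (justfile assumed, not shown here).
just get-release-sha                 # sha256 of the release *.tar.gz
cd "$(brew --repo michael/formula)"  # work inside the tapped formula repo
just bottle                          # build and generate the homebrew bottle
# ...update the formula's url, sha256, version, and `bottle do` block by hand...
just remove-bottles                  # clean the bottle out of the current directory
```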
CommandClient:

@@ -4,10 +4,10 @@ import DependenciesMacros
 import Foundation
 import ShellClient
 
-public extension DependencyValues {
+extension DependencyValues {
 
   /// Runs shell commands.
-  var commandClient: CommandClient {
+  public var commandClient: CommandClient {
     get { self[CommandClient.self] }
     set { self[CommandClient.self] = newValue }
   }
@@ -67,12 +67,13 @@ public struct CommandClient: Sendable {
     in workingDirectory: String? = nil,
     _ arguments: [String]
   ) async throws {
-    try await runCommand(.init(
-      arguments: arguments,
-      quiet: quiet,
-      shell: shell,
-      workingDirectory: workingDirectory
-    ))
+    try await runCommand(
+      .init(
+        arguments: arguments,
+        quiet: quiet,
+        shell: shell,
+        workingDirectory: workingDirectory
+      ))
   }
 
   /// Runs a shell command.
@@ -161,19 +162,21 @@ extension CommandClient: DependencyKey {
     .init { options in
       @Dependency(\.asyncShellClient) var shellClient
       if !options.quiet {
-        try await shellClient.foreground(.init(
-          shell: .init(options.shell),
-          environment: environment,
-          in: options.workingDirectory,
-          options.arguments
-        ))
+        try await shellClient.foreground(
+          .init(
+            shell: .init(options.shell),
+            environment: environment,
+            in: options.workingDirectory,
+            options.arguments
+          ))
       } else {
-        try await shellClient.background(.init(
-          shell: .init(options.shell),
-          environment: environment,
-          in: options.workingDirectory,
-          options.arguments
-        ))
+        try await shellClient.background(
+          .init(
+            shell: .init(options.shell),
+            environment: environment,
+            in: options.workingDirectory,
+            options.arguments
+          ))
       }
     }
   }
@@ -184,12 +187,12 @@ extension CommandClient: DependencyKey {
 }
 
 @_spi(Internal)
-public extension CommandClient {
+extension CommandClient {
 
   /// Create a command client that can capture the arguments / options.
   ///
   /// This is used for testing.
-  static func capturing(_ client: CapturingClient) -> Self {
+  public static func capturing(_ client: CapturingClient) -> Self {
     .init { options in
       await client.set(options)
     }
@@ -198,7 +201,7 @@ public extension CommandClient {
   /// Captures the arguments / options passed into the command client's run commands.
   ///
   @dynamicMemberLookup
-  actor CapturingClient: Sendable {
+  public actor CapturingClient: Sendable {
     public private(set) var options: RunCommandOptions?
 
     public init() {}
ConfigurationClient (LiveConfigurationClient):

@@ -1,4 +1,5 @@
 import CodersClient
+import CommandClient
 import Dependencies
 import DependenciesMacros
 import FileClient
@@ -155,6 +156,7 @@ struct LiveConfigurationClient {
   private let environment: [String: String]
 
   @Dependency(\.coders) var coders
+  @Dependency(\.commandClient) var commandClient
   @Dependency(\.fileClient) var fileManager
   @Dependency(\.logger) var logger
 
@@ -245,19 +247,13 @@ struct LiveConfigurationClient {
       try await fileManager.createDirectory(fileDirectory)
     }
 
-    // TODO: The hpa file needs to be copied somewhere on the system during install and
-    // not use bundle, as it only works if the tool was built on the users system.
     if case .toml = file {
-      // In the case of toml, we copy the internal resource that includes
-      // usage comments in the file.
-      guard let resourceFile = Bundle.module.url(
-        forResource: HPAKey.resourceFileName,
-        withExtension: HPAKey.resourceFileExtension
-      ) else {
-        throw ConfigurationError.resourceNotFound
-      }
-
-      try await fileManager.copy(resourceFile, fileUrl)
+      // Copy the file using curl, because when installed as a pre-built binary we
+      // don't have access to bundled resources.
+      try await commandClient.run(
+        quiet: true,
+        ["curl", HPAKey.tomlConfigUrl, "--output", fileUrl.path]
+      )
     } else {
       // Json does not allow comments, so we write the mock configuration
       // to the file path.
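The new default-config path shells out to `curl` rather than reading a bundled resource. Roughly the equivalent command line is sketched below; the URL is `HPAKey.tomlConfigUrl` from the next hunk, while the destination is whatever file path the caller passes in — the README's default config location is used here purely for illustration:

```bash
# Fetch the stock hpa.toml the same way the new code does (destination path assumed).
curl https://git.housh.dev/michael/swift-hpa/raw/branch/main/Sources/ConfigurationClient/Resources/hpa.toml \
  --output ~/.config/hpa/config.toml
```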
HPAKey:

@@ -15,6 +15,7 @@ public enum HPAKey {
   public static let resourceFileExtension = "toml"
   public static let defaultFileName = "config.toml"
   public static let defaultFileNameWithoutExtension = "config"
+  public static let tomlConfigUrl = "https://git.housh.dev/michael/swift-hpa/raw/branch/main/Sources/ConfigurationClient/Resources/hpa.toml"
 }
 
 extension [String: String] {
FileClient (LiveFileClient):

@@ -120,7 +120,11 @@ struct LiveFileClient: Sendable {
 
   func isDirectory(_ url: URL) -> Bool {
     var isDirectory: ObjCBool = false
-    manager.fileExists(atPath: url.cleanFilePath, isDirectory: &isDirectory)
+    #if os(Linux)
+    _ = manager.fileExists(atPath: url.cleanFilePath, isDirectory: &isDirectory)
+    #else
+    manager.fileExists(atPath: url.cleanFilePath, isDirectory: &isDirectory)
+    #endif
     return isDirectory.boolValue
   }
 
PandocClient run options (PandocClient.RunOptions / EnsuredPandocOptions):

@@ -6,26 +6,72 @@ import PlaybookClient
 
 extension PandocClient.RunOptions {
 
+  /// Runs a pandoc conversion on the project generating the given file type.
+  ///
+  /// - Parameters:
+  ///   - fileType: The file type to convert to.
+  ///   - environment: The environment variables.
+  ///
+  /// - Returns: File path to the converted output file.
   func run(
     _ fileType: PandocClient.FileType,
     _ environment: [String: String]
+  ) async throws -> String {
+    @Dependency(\.logger) var logger
+
+    let ensuredOptions = try await self.ensuredOptions(fileType)
+
+    let projectDirectory = self.projectDirectory ?? environment["PWD"]
+
+    guard let projectDirectory else {
+      throw ProjectDirectoryNotSpecified()
+    }
+
+    try await buildProject(projectDirectory, ensuredOptions)
+
+    let outputDirectory = self.outputDirectory ?? projectDirectory
+    let outputPath = "\(outputDirectory)/\(ensuredOptions.ensuredFilename)"
+
+    let arguments = ensuredOptions.makeArguments(
+      outputPath: outputPath,
+      projectDirectory: projectDirectory
+    )
+
+    logger.debug("Pandoc arguments: \(arguments)")
+    return try await runCommand(arguments, outputPath)
+
+  }
+
+  /// Runs a shell command with the given arguments, returning the passed in output path
+  /// so the command can be chained, if needed.
+  ///
+  @discardableResult
+  func runCommand(
+    _ arguments: [String],
+    _ outputPath: String
   ) async throws -> String {
     @Dependency(\.commandClient) var commandClient
+    @Dependency(\.logger) var logger
+    logger.debug("Running shell command with arguments: \(arguments)")
+    return try await commandClient.run(logging: loggingOptions, quiet: quiet, shell: shell) {
+      (arguments, outputPath)
+    }
+  }
+
+  /// Build the project if necessary, before running the shell command that builds the final
+  /// output file(s).
+  ///
+  func buildProject(
+    _ projectDirectory: String,
+    _ ensuredOptions: EnsuredPandocOptions
+  ) async throws {
     @Dependency(\.logger) var logger
     @Dependency(\.playbookClient) var playbookClient
 
-    return try await commandClient.run(logging: loggingOptions, quiet: quiet, shell: shell) {
-      let ensuredOptions = try await self.ensuredOptions(fileType)
-
-      let projectDirectory = self.projectDirectory ?? environment["PWD"]
-
-      guard let projectDirectory else {
-        throw ProjectDirectoryNotSpecified()
-      }
-
-      if shouldBuildProject {
-        logger.debug("Building project...")
-        try await playbookClient.run.buildProject(.init(
+    if shouldBuildProject {
+      logger.debug("Building project...")
+      try await playbookClient.run.buildProject(
+        .init(
           projectDirectory: projectDirectory,
           shared: .init(
             extraOptions: nil,
@@ -34,23 +80,28 @@ extension PandocClient.RunOptions {
             quiet: quiet,
             shell: shell
           )
-        ))
-      }
+        )
+      )
+    }
 
-      let outputDirectory = self.outputDirectory ?? projectDirectory
-      let outputPath = "\(outputDirectory)/\(ensuredOptions.ensuredExtensionFileName)"
-
-      let arguments = ensuredOptions.makeArguments(
+    // Build latex file pre-html, so that we can properly convert the latex document
+    // into an html document.
+    if ensuredOptions.outputFileType == .html {
+      logger.debug("Building latex, pre-html conversion...")
+      let outputPath = "\(ensuredOptions.buildDirectory)/\(EnsuredPandocOptions.latexFilename)"
+      let arguments = ensuredOptions.preHtmlLatexOptions.makeArguments(
         outputPath: outputPath,
         projectDirectory: projectDirectory
       )
-
-      logger.debug("Pandoc arguments: \(arguments)")
-
-      return (arguments, outputPath)
+      try await runCommand(arguments, outputPath)
     }
   }
 
+  /// Generates the ensured/parsed options for a pandoc conversion.
+  ///
+  /// - Parameter fileType: The file type we're converting to.
+  ///
+  /// - Returns: The ensured options.
   func ensuredOptions(
     _ fileType: PandocClient.FileType
   ) async throws -> EnsuredPandocOptions {
@@ -69,6 +120,7 @@ extension PandocClient.RunOptions {
 }
 
 extension PandocClient.FileType {
+  /// Represents the appropriate file extension for a file type.
   var fileExtension: String {
     switch self {
     case .html: return "html"
@@ -78,8 +130,14 @@ extension PandocClient.FileType {
   }
 }
 
+/// Represents pandoc options that get parsed based on the given run options, configuration, etc.
+///
+/// This set's potentially optional values into real values that are required for pandoc to run
+/// properly and convert files for the given file type conversion.
 @_spi(Internal)
 public struct EnsuredPandocOptions: Equatable, Sendable {
+  public static let latexFilename = "Report.tex"
+
   public let buildDirectory: String
   public let extraOptions: [String]?
   public let files: [String]
@@ -88,7 +146,9 @@ public struct EnsuredPandocOptions: Equatable, Sendable {
   public let outputFileType: PandocClient.FileType
   public let pdfEngine: String?
 
-  public var ensuredExtensionFileName: String {
+  /// Ensures the output filename includes the file extension, so that pandoc
+  /// can properly convert the files.
+  public var ensuredFilename: String {
     let extensionString = ".\(outputFileType.fileExtension)"
 
     if !outputFileName.hasSuffix(extensionString) {
@@ -97,14 +157,33 @@ public struct EnsuredPandocOptions: Equatable, Sendable {
     return outputFileName
   }
 
+  /// Generates the options required for building the latex file that is needed
+  /// to convert the project to an html output file.
+  var preHtmlLatexOptions: Self {
+    .init(
+      buildDirectory: buildDirectory,
+      extraOptions: extraOptions,
+      files: files,
+      includeInHeader: includeInHeader,
+      outputFileName: Self.latexFilename,
+      outputFileType: .latex,
+      pdfEngine: nil
+    )
+  }
+
+  /// Generate arguments for the pandoc shell command based on the parsed options
+  /// for a given conversion.
+  ///
   func makeArguments(
     outputPath: String,
     projectDirectory: String
   ) -> [String] {
     var arguments = [PandocClient.Constants.pandocCommand]
 
-    arguments += includeInHeader.map {
-      "--include-in-header=\(projectDirectory)/\(buildDirectory)/\($0)"
+    if outputFileType != .html {
+      arguments += includeInHeader.map {
+        "--include-in-header=\(projectDirectory)/\(buildDirectory)/\($0)"
+      }
     }
 
     if let pdfEngine {
@@ -117,8 +196,12 @@ public struct EnsuredPandocOptions: Equatable, Sendable {
       arguments.append(contentsOf: extraOptions)
     }
 
-    arguments += files.map {
-      "\(projectDirectory)/\(buildDirectory)/\($0)"
+    if outputFileType != .html {
+      arguments += files.map {
+        "\(projectDirectory)/\(buildDirectory)/\($0)"
+      }
+    } else {
+      arguments.append("\(projectDirectory)/\(buildDirectory)/\(Self.latexFilename)")
     }
 
     return arguments
@@ -145,15 +228,15 @@ public func ensurePandocOptions(
 }
 
 @_spi(Internal)
-public extension PandocClient.FileType {
-  func parsePdfEngine(
+extension PandocClient.FileType {
+  public func parsePdfEngine(
     _ configuration: Configuration.Generate?,
     _ defaults: Configuration.Generate
   ) -> String? {
     switch self {
     case .html, .latex:
       return nil
-    case let .pdf(engine: engine):
+    case .pdf(let engine):
       if let engine {
         return engine
       } else if let engine = configuration?.pdfEngine {
@@ -168,8 +251,8 @@ public extension PandocClient.FileType {
 }
 
 @_spi(Internal)
-public extension PandocClient.RunOptions {
-  func parseFiles(
+extension PandocClient.RunOptions {
+  public func parseFiles(
     _ configuration: Configuration.Generate?,
     _ defaults: Configuration.Generate
   ) -> [String] {
@@ -187,7 +270,7 @@ public extension PandocClient.RunOptions {
     }
   }
 
-  func parseIncludeInHeader(
+  public func parseIncludeInHeader(
     _ configuration: Configuration.Generate?,
     _ defaults: Configuration.Generate
   ) -> [String] {
@@ -205,7 +288,7 @@ public extension PandocClient.RunOptions {
     }
   }
 
-  func parseOutputFileName(
+  public func parseOutputFileName(
     _ configuration: Configuration.Generate?,
     _ defaults: Configuration.Generate
   ) -> String {
@@ -223,7 +306,7 @@ public extension PandocClient.RunOptions {
     }
   }
 
-  func parseBuildDirectory(
+  public func parseBuildDirectory(
     _ configuration: Configuration.Generate?,
     _ defaults: Configuration.Generate
   ) -> String {
PandocClient dependency (DependencyValues.pandocClient):

@@ -4,14 +4,14 @@ import Dependencies
 import DependenciesMacros
 import Foundation
 
-public extension DependencyValues {
+extension DependencyValues {
 
   /// Represents interactions with the `pandoc` command line application.
   ///
   /// The `pandoc` command line application is used to generate the final output
   /// documents from a home performance assessment project.
   ///
-  var pandocClient: PandocClient {
+  public var pandocClient: PandocClient {
     get { self[PandocClient.self] }
     set { self[PandocClient.self] = newValue }
   }
PlaybookClient (RunPlaybook.SharedRunOptions):

@@ -81,6 +81,8 @@ extension PlaybookClient.RunPlaybook.SharedRunOptions {
   ) async throws -> T {
     @Dependency(\.commandClient) var commandClient
 
+    try await ensurePlaybookExists()
+
     return try await commandClient.run(
       logging: loggingOptions,
       quiet: quiet,
@@ -101,6 +103,17 @@ extension PlaybookClient.RunPlaybook.SharedRunOptions {
       return (arguments, output)
     }
   }
+
+  private func ensurePlaybookExists() async throws {
+    @Dependency(\.fileClient) var fileClient
+    @Dependency(\.playbookClient.repository) var repository
+
+    let directory = try await repository.directory()
+    let exists = try await fileClient.isDirectory(URL(filePath: directory))
+    if !exists {
+      try await repository.install()
+    }
+  }
 }
 
 @_spi(Internal)
CreateCommand:

@@ -40,7 +40,10 @@ struct CreateCommand: AsyncParsableCommand {
   var templateDir: String?
 
   @Flag(
-    name: .shortAndLong,
+    name: [
+      .short,
+      .customLong("use-local-template")
+    ],
     help: "Force using a local template directory."
   )
   var localTemplateDir = false
Version.swift:

@@ -1,2 +1,2 @@
 // Do not set this variable, it is set during the build process.
-let VERSION: String? = nil
+let VERSION: String? = "0.1.5"
TODO.md (new file, 7 lines):

@@ -0,0 +1,7 @@
+# TODO
+
+- [ ] Build docker images in ci.
+- [ ] Generate documentation for docker usage.
+- [ ] Generally need to create a local wrapper script to mount volumes.
+- [ ] Completions can be installed / used with the wrapper script by calling
+      `docker run -it --rm <image> --generate-completion-script <shell> > /path/to/completions/on/local`
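The last two TODO items describe a wrapper script; nothing in this diff implements it, so the following is purely a hypothetical sketch of what such a wrapper might look like — the mount points and the image tag are my assumptions:

```bash
#!/usr/bin/env bash
# Hypothetical wrapper for running hpa from the docker image (see TODO.md above).
# Mounts the current project directory and the hpa config into the container.
exec docker run -it --rm \
  -v "$PWD":/work -w /work \
  -v "$HOME/.config/hpa":/root/.config/hpa \
  git.housh.dev/michael/swift-hpa:latest "$@"
```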
ConfigurationClientTests:

@@ -18,6 +18,8 @@ struct ConfigurationClientTests: TestCase {
   @Test(arguments: ["config.toml", "config.json"])
   func generateConfigFile(fileName: String) async throws {
     try await withTestLogger(key: "generateConfigFile") {
+      $0.asyncShellClient = .liveValue
+      $0.commandClient = .liveValue
       $0.coders = .liveValue
       $0.fileClient = .liveValue
     } operation: {
@@ -35,18 +37,6 @@ struct ConfigurationClientTests: TestCase {
       #expect(FileManager.default.fileExists(atPath: tempFile.cleanFilePath))
       #expect(fileClient.fileExists(tempFile))
       #expect(output == tempFile.cleanFilePath)
-
-      // Ensure that we do not overwrite files if they exist.
-      do {
-        _ = try await configuration.generate(.init(
-          force: false,
-          json: fileName.hasSuffix("json"),
-          path: .file(File(tempFile)!)
-        ))
-        #expect(Bool(false))
-      } catch {
-        #expect(Bool(true))
-      }
     }
   }
 }
@@ -54,6 +44,8 @@ struct ConfigurationClientTests: TestCase {
   @Test(arguments: ["config.toml", "config.json", nil])
   func loadConfigFile(fileName: String?) async throws {
     try await withTestLogger(key: "generateConfigFile") {
+      $0.asyncShellClient = .liveValue
+      $0.commandClient = .liveValue
       $0.coders = .liveValue
       $0.fileClient = .liveValue
     } operation: {
@@ -77,6 +69,8 @@ struct ConfigurationClientTests: TestCase {
   @Test(arguments: ["config.toml", "config.json", ".hparc.json", ".hparc.toml"])
   func findConfiguration(fileName: String) async throws {
     try await withTestLogger(key: "findConfiguration") {
+      $0.asyncShellClient = .liveValue
+      $0.commandClient = .liveValue
       $0.fileClient = .liveValue
     } operation: {
       @Dependency(\.logger) var logger
@@ -106,6 +100,8 @@ struct ConfigurationClientTests: TestCase {
   @Test(arguments: ["config.toml", "config.json", ".hparc.json", ".hparc.toml"])
   func findXdgConfiguration(fileName: String) async throws {
     try await withTestLogger(key: "findXdgConfiguration") {
+      $0.asyncShellClient = .liveValue
+      $0.commandClient = .liveValue
       $0.fileClient = .liveValue
     } operation: {
       @Dependency(\.logger) var logger
@@ -145,6 +141,8 @@ struct ConfigurationClientTests: TestCase {
   @Test
   func writeCreatesBackupFile() async throws {
     try await withDependencies {
+      $0.asyncShellClient = .liveValue
+      $0.commandClient = .liveValue
       $0.fileClient = .liveValue
     } operation: {
       let client = ConfigurationClient.liveValue
@@ -152,7 +150,7 @@ struct ConfigurationClientTests: TestCase {
     try await withGeneratedConfigFile(named: "config.toml", client: client) { configFile in
       @Dependency(\.fileClient) var fileClient
 
-      let backupUrl = configFile.url.appendingPathExtension(".back")
+      let backupUrl = configFile.url.appendingPathExtension("back")
       #expect(fileClient.fileExists(backupUrl) == false)
 
       let config = Configuration()
FileClientTests:

@@ -23,7 +23,11 @@ struct FileClientTests {
     let fileClient = FileClient.liveValue
 
     let vaultFilePath = url.appending(path: fileName)
-    FileManager.default.createFile(atPath: vaultFilePath.cleanFilePath, contents: nil)
+    #if os(Linux)
+    _ = FileManager.default.createFile(atPath: vaultFilePath.cleanFilePath, contents: nil)
+    #else
+    FileManager.default.createFile(atPath: vaultFilePath.cleanFilePath, contents: nil)
+    #endif
     let output = try await fileClient.findVaultFile(url)!
 
     #expect(output.cleanFilePath == vaultFilePath.cleanFilePath)
@@ -43,7 +47,11 @@ struct FileClientTests {
     try await fileClient.createDirectory(subDir)
 
     let vaultFilePath = subDir.appending(path: fileName)
-    FileManager.default.createFile(atPath: vaultFilePath.cleanFilePath, contents: nil)
+    #if os(Linux)
+    _ = FileManager.default.createFile(atPath: vaultFilePath.cleanFilePath, contents: nil)
+    #else
+    FileManager.default.createFile(atPath: vaultFilePath.cleanFilePath, contents: nil)
+    #endif
     let output = try await fileClient.findVaultFile(url)!
 
     #expect(output.cleanFilePath == vaultFilePath.cleanFilePath)
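The `#if os(Linux)` branches added here and in `LiveFileClient` earlier in the diff appear to exist because the Bool results of `FileManager.createFile(atPath:contents:)` and `fileExists(atPath:isDirectory:)` are not discardable under corelibs-foundation, so the Linux path assigns them to `_`. A quick way to exercise both platforms, assuming Docker is available for the Linux run (the `swift:5.10` tag is an assumption — use whichever toolchain the package targets):

```bash
# Run the test suite on the host toolchain (macOS), then inside a Linux Swift container.
swift test
docker run --rm -v "$PWD":/src -w /src swift:5.10 swift test
```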
PandocClientTests (the capture ends mid-hunk; the remainder of this file's diff was not captured):

@@ -1,8 +1,8 @@
 @_spi(Internal) import ConfigurationClient
 @_spi(Internal) import PandocClient
 import PlaybookClient
-import Testing
 import TestSupport
+import Testing
 
 @Suite("PandocClientTests")
 struct PandocClientTests: TestCase {
@@ -13,12 +13,12 @@ struct PandocClientTests: TestCase {
 
   static let expectedIncludeInHeaders = [
     "--include-in-header=/project/.build/head.tex",
-    "--include-in-header=/project/.build/footer.tex"
+    "--include-in-header=/project/.build/footer.tex",
   ]
 
   static let expectedFiles = [
     "/project/.build/Report.md",
-    "/project/.build/Definitions.md"
+    "/project/.build/Definitions.md",
   ]
 
   static var sharedRunOptions: PandocClient.RunOptions {
@@ -49,7 +49,8 @@ struct PandocClientTests: TestCase {
       #expect(output == "\(Self.outputDirectory)/\(Self.defaultFileName).tex")
 
     } assert: { output in
-      let expected = ["pandoc"]
+      let expected =
+        ["pandoc"]
         + Self.expectedIncludeInHeaders
         + ["--output=\(Self.outputDirectory)/\(Self.defaultFileName).tex"]
         + Self.expectedFiles
@@ -71,10 +72,11 @@ struct PandocClientTests: TestCase {
       #expect(output == "\(Self.outputDirectory)/\(Self.defaultFileName).html")
 
     } assert: { output in
-      let expected = ["pandoc"]
-        + Self.expectedIncludeInHeaders
-        + ["--output=\(Self.outputDirectory)/\(Self.defaultFileName).html"]
-        + Self.expectedFiles
+      let expected = [
+        "pandoc",
+        "--output=\(Self.outputDirectory)/\(Self.defaultFileName).html",
+        "\(Self.projectDirectory)/.build/Report.tex",
+      ]
 
       #expect(output.arguments == expected)
     }
@@ -83,7 +85,7 @@ struct PandocClientTests: TestCase {
   @Test(
     arguments: [
       nil,
-      "lualatex"
+      "lualatex",
     ]
   )
   func generatePdf(pdfEngine: String?) async throws {
@@ -94,11 +96,13 @@ struct PandocClientTests: TestCase {
     } run: {
       @Dependency(\.pandocClient) var pandocClient
 
-      let output = try await pandocClient.run.generatePdf(Self.sharedRunOptions, pdfEngine: pdfEngine)
+      let output = try await pandocClient.run.generatePdf(
+        Self.sharedRunOptions, pdfEngine: pdfEngine)
      #expect(output == "\(Self.outputDirectory)/\(Self.defaultFileName).pdf")
 
     } assert: { output in
-      let expected = ["pandoc"]
+      let expected =
+        ["pandoc"]
         + Self.expectedIncludeInHeaders
         + ["--pdf-engine=\(pdfEngine ?? "xelatex")"]
         + ["--output=\(Self.outputDirectory)/\(Self.defaultFileName).pdf"]
@@ -147,10 +151,18 @@ struct TestPdfEngine: Sendable {
   static let testCases: [Self] = [
     .init(fileType: .html, expectedEngine: nil, configuration: .init(), defaults: .default),
     .init(fileType: .latex, expectedEngine: nil, configuration: .init(), defaults: .default),
-    .init(fileType: .pdf(engine: "lualatex"), expectedEngine: "lualatex", configuration: .init(), defaults: .default),
-    .init(fileType: .pdf(engine: nil), expectedEngine: "xelatex", configuration: .init(), defaults: .default),
-    .init(fileType: .pdf(engine: nil), expectedEngine: "xelatex", configuration: .init(), defaults: .init()),
-    .init(fileType: .pdf(engine: nil), expectedEngine: "xelatex", configuration: .init(generate: .default), defaults: .init())
+    .init(
+      fileType: .pdf(engine: "lualatex"), expectedEngine: "lualatex", configuration: .init(),
+      defaults: .default),
+    .init(
+      fileType: .pdf(engine: nil), expectedEngine: "xelatex", configuration: .init(),
+      defaults: .default),
+    .init(
+      fileType: .pdf(engine: nil), expectedEngine: "xelatex", configuration: .init(),
+      defaults: .init()),
+    .init(
+      fileType: .pdf(engine: nil), expectedEngine: "xelatex",
+      configuration: .init(generate: .default), defaults: .init()),
   ]
 }
 
@@ -174,19 +186,23 @@ struct TestParseFiles: Sendable {
   }
 
   var parsedFiles: [String] {
-    let runOptions = self.runOptions ?? PandocClient.RunOptions(
-      loggingOptions: .init(commandName: "parseFiles", logLevel: .debug),
-      projectDirectory: nil,
-      quiet: true,
-      shouldBuild: false
-    )
+    let runOptions =
+      self.runOptions
+      ?? PandocClient.RunOptions(
+        loggingOptions: .init(commandName: "parseFiles", logLevel: .debug),
+        projectDirectory: nil,
+        quiet: true,
+        shouldBuild: false
+      )
 
     return runOptions.parseFiles(configuration.generate, defaults)
   }
 
   static let testCases: [Self] = [
     .init(expectedFiles: ["Report.md", "Definitions.md"]),
-    .init(expectedFiles: ["Report.md", "Definitions.md"], configuration: .init(generate: .default), defaults: .init()),
+    .init(
+      expectedFiles: ["Report.md", "Definitions.md"], configuration: .init(generate: .default),
+      defaults: .init()),
     .init(expectedFiles: [], defaults: .init()),
     .init(
       expectedFiles: ["custom.md"],
@@ -199,7 +215,7 @@ struct TestParseFiles: Sendable {
         quiet: true,
         shouldBuild: false
       )
-    )
+    ),
   ]
 }
 
@@ -223,16 +239,20 @@ struct TestParseIncludeInHeaderFiles: Sendable {
   }
 
   var parsedFiles: [String] {
-    let runOptions = self.runOptions ?? PandocClient.RunOptions(
-      loggingOptions: .init(commandName: "parseFiles", logLevel: .debug)
-    )
+    let runOptions =
+      self.runOptions
+      ?? PandocClient.RunOptions(
+        loggingOptions: .init(commandName: "parseFiles", logLevel: .debug)
+      )
 
     return runOptions.parseIncludeInHeader(configuration.generate, defaults)
   }
 
   static let testCases: [Self] = [
     .init(expectedHeaderFiles: ["head.tex", "footer.tex"]),
-    .init(expectedHeaderFiles: ["head.tex", "footer.tex"], configuration: .init(generate: .default), defaults: .init()),
+    .init(
+      expectedHeaderFiles: ["head.tex", "footer.tex"], configuration: .init(generate: .default),
+      defaults: .init()),
     .init(expectedHeaderFiles: [], defaults: .init()),
     .init(
       expectedHeaderFiles: ["custom.tex"],
@@ -242,7 +262,7 @@ struct TestParseIncludeInHeaderFiles: Sendable {
         loggingOptions: .init(commandName: "parseFiles", logLevel: .debug),
         includeInHeader: ["custom.tex"]
       )
-    )
+    ),
   ]
 }
 
@@ -266,9 +286,11 @@ struct TestParseOutputFileName: Sendable {
   }
 
   var parsedFileName: String {
-    let runOptions = self.runOptions ?? PandocClient.RunOptions(
-      loggingOptions: .init(commandName: "parseFiles", logLevel: .debug)
-    )
+    let runOptions =
+      self.runOptions
+      ?? PandocClient.RunOptions(
+        loggingOptions: .init(commandName: "parseFiles", logLevel: .debug)
+      )
 
     return runOptions.parseOutputFileName(configuration.generate, defaults)
   }
@@ -285,7 +307,7 @@ struct TestParseOutputFileName: Sendable {
         loggingOptions: .init(commandName: "parseFiles", logLevel: .debug),
         outputFileName: "custom"
       )
-    )
+    ),
   ]
 }
 
@@ -309,9 +331,11 @@ struct TestParseBuildDirectory: Sendable {
   }
 
   var parsedBuildDirectory: String {
-    let runOptions = self.runOptions ?? PandocClient.RunOptions(
|
let runOptions =
|
||||||
loggingOptions: .init(commandName: "parseFiles", logLevel: .debug)
|
self.runOptions
|
||||||
)
|
?? PandocClient.RunOptions(
|
||||||
|
loggingOptions: .init(commandName: "parseFiles", logLevel: .debug)
|
||||||
|
)
|
||||||
|
|
||||||
return runOptions.parseBuildDirectory(configuration.generate, defaults)
|
return runOptions.parseBuildDirectory(configuration.generate, defaults)
|
||||||
}
|
}
|
||||||
@@ -328,6 +352,6 @@ struct TestParseBuildDirectory: Sendable {
|
|||||||
buildDirectory: "custom",
|
buildDirectory: "custom",
|
||||||
loggingOptions: .init(commandName: "parseFiles", logLevel: .debug)
|
loggingOptions: .init(commandName: "parseFiles", logLevel: .debug)
|
||||||
)
|
)
|
||||||
)
|
),
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
@@ -145,6 +145,7 @@ struct PlaybookClientTests: TestCase {
 @Test
 func generateTemplate() async throws {
 try await withCapturingCommandClient("generateTemplate") {
+$0.fileClient.isDirectory = { _ in true }
 $0.configurationClient = .mock()
 $0.playbookClient = .liveValue
 } run: {
@@ -180,6 +181,7 @@ struct PlaybookClientTests: TestCase {
 operation: @Sendable @escaping () async throws -> Void
 ) async rethrows {
 try await withDependencies {
+$0.fileClient.isDirectory = { _ in true }
 $0.configurationClient = .mock(configuration)
 $0.commandClient = .capturing(capturing)
 $0.playbookClient = .liveValue
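The test hunks above are mostly swift-format line wrapping, plus a new $0.fileClient.isDirectory override so the playbook tests see the project directory as existing. To exercise just that suite locally, something like the following should work; the --filter pattern is an assumption based on the type name in the hunk header, and the "just test" recipe (defined in the justfile further down) simply forwards its arguments to swift test.

# Run only the playbook client suite; the filter pattern is assumed from the hunk header.
swift test --filter PlaybookClientTests

# Or through the justfile recipe, which forwards its arguments to swift test.
just test --filter PlaybookClientTests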
@@ -2,22 +2,76 @@
 # Build the executable
 ARG SWIFT_IMAGE_VERSION="6.0.3"

-FROM swift:${SWIFT_IMAGE_VERSION} AS build
+# ============================================================
+# Build Swift Image
+# ============================================================
+FROM docker.io/swift:${SWIFT_IMAGE_VERSION} AS build

+# Install OS updates
+RUN export DEBIAN_FRONTEND=nointeractive DEBCONF_NOINTERACTIVE_SEEN=true && \
+apt-get -q update && \
+apt-get -q dist-upgrade -y && \
+apt-get install -y libjemalloc-dev

 WORKDIR /build

+# Resolve dependencies; this creates a cached layer.
 COPY ./Package.* ./
-RUN swift package resolve
+RUN --mount=type=cache,target=/build/.build swift package resolve

 COPY . .
-RUN swift build -c release -Xswiftc -g

+# Build the application.
+RUN --mount=type=cache,target=/build/.build \
+swift build -c release \
+--product hpa \
+--static-swift-stdlib \
+-Xlinker -ljemalloc

-# Run image
-FROM swift:${SWIFT_IMAGE_VERSION}-slim
+# Switch to staging area.
+WORKDIR /staging
+
+# Copy main executable to staging area.
+RUN --mount=type=cache,target=/build/.build \
+cp "$(swift build --package-path /build -c release --show-bin-path)/hpa" ./
+
+# ============================================================
+# Run Image
+# ============================================================
+FROM docker.io/ubuntu:noble

-RUN export DEBIAN_FRONTEND=nointeractive DEBCONF_NOINTERACTIVE_SEEN=true && apt-get -q update && \
+# Update base image and install needed packages.
+#
+# NOTE: Installs vim as a minimal text editor to use inside the container, because
+# when I mount my home directory / use my neovim config it requires
+# neovim v11+, but generally only going to edit ansible vault files
+# inside the container.
+RUN export DEBIAN_FRONTEND=nointeractive DEBCONF_NOINTERACTIVE_SEEN=true && \
+apt-get -q update && \
+apt-get -q dist-upgrade -y && \
 apt-get -q install -y \
 ansible \
+curl \
+imagemagick \
 pandoc \
 texlive \
+texlive-xetex \
+libjemalloc2 \
+libcurl4 \
+tzdata \
+vim \
 && rm -r /var/lib/apt/lists/*

-COPY --from=build /build/.build/release/hpa /usr/local/bin
-CMD ["/bin/bash", "-xc", "/usr/local/bin/hpa"]
+# Install the hpa executable.
+COPY --from=build /staging/hpa /usr/local/bin
+
+# Install the entrypoint script and make it executable.
+COPY docker/entrypoint.sh /entrypoint.sh
+RUN chmod +x /entrypoint.sh && mkdir /root/project
+
+# Set workdir and volume mounts.
+WORKDIR /root/project
+VOLUME /root/project
+
+ENTRYPOINT [ "/entrypoint.sh" ]
+CMD ["--help"]
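For reference, building and running this image locally might look like the sketch below. The image name and tag mirror the build-docker recipe in the justfile, the mount path matches the VOLUME declared above, and BuildKit is needed for the --mount=type=cache steps.

# Build the runtime image (BuildKit is required for the cache mounts).
DOCKER_BUILDKIT=1 docker build --file docker/Dockerfile --tag swift-hpa:latest .

# Run hpa against a project mounted at the volume path declared in the image;
# with no extra arguments the default CMD runs hpa --help.
docker run --rm -v "$(pwd):/root/project" swift-hpa:latest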
@@ -2,6 +2,11 @@
 ARG SWIFT_IMAGE_VERSION="6.0.3"
 FROM swift:${SWIFT_IMAGE_VERSION}

+RUN export DEBIAN_FRONTEND=nointeractive DEBCONF_NOINTERACTIVE_SEEN=true && apt-get -q update && \
+apt-get -q install -y \
+curl \
+&& rm -r /var/lib/apt/lists/*
+
 WORKDIR /app
 COPY ./Package.* ./
 RUN swift package resolve
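This hunk is presumably the test image (docker/Dockerfile.test, referenced by the justfile's build-docker-test recipe): it only layers curl on top of the stock Swift image so the full toolchain stays available for running the test suite. Building and using it is wrapped by the build-docker-test and test-docker recipes shown in the justfile below, which boil down to roughly:

# Rough equivalent of "just test-docker"; image name and tag taken from the justfile below.
docker build --file docker/Dockerfile.test --tag swift-hpa:test .
docker run --rm --network host swift-hpa:test swift test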
27
docker/entrypoint.sh
Normal file
@@ -0,0 +1,27 @@
+#!/bin/bash
+
+declare -a args
+
+# Allows attaching to a shell inside the container, or running ansible commands;
+# otherwise runs the 'hpa' script with the given arguments.
+#
+while [[ $# -gt 0 ]]; do
+if [[ $1 == "/bin/bash" ]] || [[ $1 == "bash" ]]; then
+shift
+/bin/bash "$@"
+exit $?
+elif [[ $1 == "/bin/sh" ]] || [[ $1 == "sh" ]]; then
+shift
+/bin/sh "$@"
+exit $?
+elif [[ $1 =~ ^ansible ]]; then
+exec "$@"
+exit $?
+else
+args+=("$1")
+fi
+shift
+done
+
+# If we made it here, then run the hpa script.
+/usr/local/bin/hpa "${args[@]}"
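The entrypoint collects any unrecognized arguments and hands them to hpa, while shell and ansible invocations bypass it entirely. From the host that looks roughly like the commands below; the image tag follows the justfile's build-docker recipe, and site.yaml is only a placeholder playbook name, not something from this diff.

# Drop into a shell inside the container.
docker run --rm -it -v "$(pwd):/root/project" swift-hpa:latest bash

# Anything starting with "ansible" is exec'd as-is (site.yaml is a placeholder).
docker run --rm -v "$(pwd):/root/project" swift-hpa:latest ansible-playbook site.yaml

# Everything else is collected and passed to /usr/local/bin/hpa.
docker run --rm -v "$(pwd):/root/project" swift-hpa:latest --version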
51
justfile
@@ -2,37 +2,73 @@ docker_image_name := "swift-hpa"
 install_path := "~/.local/share/bin/hpa"
 completion_path := "~/.local/share/zsh/completions/_hpa"

-build mode="debug":
-swift build -c {{mode}}
+tap_url := "https://git.housh.dev/michael/homebrew-formula"
+tap := "michael/formula"
+formula := "hpa"
+
+release_base_url := "https://git.housh.dev/michael/swift-hpa/archive"
+
+# Build and bottle homebrew formula.
+bottle:
+@brew uninstall {{formula}} || true
+@brew tap {{tap}} {{tap_url}}
+@brew install --build-bottle {{tap}}/{{formula}}
+@brew bottle {{formula}}
+bottle="$(ls *.gz)" && mv "${bottle}" "${bottle/--/-}"
+
+# Build the command-line tool.
+build configuration="debug" arch="arm64":
+swift build \
+--disable-sandbox \
+--configuration {{configuration}} \
+--arch {{arch}}

 alias b := build

+# Build the docker image.
 build-docker file="Dockerfile" tag="latest":
 @docker build \
 --file docker/{{file}} \
 --tag {{docker_image_name}}:{{tag}} .

+# Build the docker test image used for testing.
 build-docker-test: (build-docker "Dockerfile.test" "test")

+build-universal-binary: (build "release" "arm64") (build "release" "x86_64")
+@lipo -create -output {{formula}} \
+".build/arm64-apple-macosx/release/hpa" \
+".build/x86_64-apple-macosx/release/hpa"
+
+# Run tests.
 test *ARGS:
 swift test {{ARGS}}

 alias t := test

+# Run tests in docker container.
 test-docker *ARGS: (build-docker-test)
 @docker run --rm \
 --network host \
 {{docker_image_name}}:test \
 swift test {{ARGS}}

+alias td := test-docker
+
+# Remove bottles
+remove-bottles:
+rm -rf *.gz
+
+# Run the application.
 run *ARGS:
 swift run hpa {{ARGS}}

 alias r := run

+# Clean the build folder.
 clean:
 rm -rf .build

+# Bump the version based on the git tag.
 update-version:
 @swift package \
 --disable-sandbox \
@@ -40,6 +76,11 @@ update-version:
 update-version \
 hpa

-install: (build "release")
-@cp .build/release/hpa {{install_path}}
-@{{install_path}} --generate-completion-script zsh > {{completion_path}}
+# Get the sha256 sum of the release and copy to clipboard.
+get-release-sha prefix="": (build "release")
+version=$(.build/release/hpa --version) && \
+url="{{release_base_url}}/{{prefix}}${version}.tar.gz" && \
+sha=$(curl "$url" | shasum -a 256) && \
+stripped="${sha% *}" && \
+echo "$stripped" | pbcopy && \
+echo "Copied sha to clipboard: $stripped"
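Taken together, the new recipes cover the homebrew bottling and release flow. Assuming a tagged release already exists on the Gitea remote, a typical sequence might be:

# Build a universal binary and bottle the homebrew formula.
just build-universal-binary
just bottle

# Grab the sha256 of the release tarball (pass "v" if the tag is v-prefixed)
# and leave it on the clipboard for updating the formula.
just get-release-sha v

# Clean up generated bottles afterwards.
just remove-bottles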