Welcome to the mirror list, hosted at ThFree Co, Russian Federation.

github.com/littlefs-project/littlefs.git - Unnamed repository; edit this file 'description' to name the repository.
summary | refs | log | tree | commit | diff
diff options
context:
space:
mode:
authorChristopher Haster <chaster@utexas.edu>2020-12-20 11:03:20 +0300
committerChristopher Haster <chaster@utexas.edu>2021-01-10 08:42:49 +0300
commitb2235e956dda7e69fc9048c1768fcfce45c913b9 (patch)
treed9853565bcde344817cc5919bf6536383e9c734a
parentd804c2d3b7389b6508993223f797d84720803c6c (diff)
Added GitHub workflows to run tests
Mostly taken from .travis.yml, biggest changes were around how to get the status updates to work. We can't use a token on PRs the same way we could in Travis, so instead we use a second workflow that checks every pull request for "status" artifacts, and create the actual statuses in the "workflow_run" event, where we have full access to repo secrets.
-rw-r--r--  .github/workflows/status.yml  97
-rw-r--r--  .github/workflows/test.yml  238
-rw-r--r--  Makefile  11
-rw-r--r--  lfs.c  4
-rwxr-xr-x  scripts/code.py (renamed from scripts/code_size.py)  2
-rwxr-xr-x  scripts/test.py  4
6 files changed, 345 insertions, 11 deletions
diff --git a/.github/workflows/status.yml b/.github/workflows/status.yml
new file mode 100644
index 0000000..8864b29
--- /dev/null
+++ b/.github/workflows/status.yml
@@ -0,0 +1,97 @@
+name: status
+on:
+ workflow_run:
+ workflows: test
+ types: completed
+
+jobs:
+ status:
+ runs-on: ubuntu-latest
+ continue-on-error: true
+
+ steps:
+ - run: echo "${{toJSON(github.event.workflow_run)}}"
+
+ # custom statuses?
+ - uses: dawidd6/action-download-artifact@v2
+ with:
+ workflow: ${{github.event.workflow_run.name}}
+ run_id: ${{github.event.workflow_run.id}}
+ name: status
+ path: status
+ - name: update-status
+ run: |
+ # TODO remove this
+ ls status
+ for f in status/*.json
+ do
+ cat $f
+ done
+
+ shopt -s nullglob
+ for s in status/*.json
+ do
+ # parse requested status
+ export STATE="$(jq -er '.state' $s)"
+ export CONTEXT="$(jq -er '.context' $s)"
+ export DESCRIPTION="$(jq -er '.description' $s)"
+ # help lookup URL for job/steps because GitHub makes
+ # it VERY HARD to link to specific jobs
+ export TARGET_URL="$(
+ jq -er '.target_url // empty' $s || (
+ export TARGET_JOB="$(jq -er '.target_job' $s)"
+ export TARGET_STEP="$(jq -er '.target_step // ""' $s)"
+ curl -sS -H "authorization: token ${{secrets.GITHUB_TOKEN}}" \
+ "$GITHUB_API_URL/repos/$GITHUB_REPOSITORY/actions/runs/`
+ `${{github.event.workflow_run.id}}/jobs" \
+ | jq -er '.jobs[]
+ | select(.name == env.TARGET_JOB)
+ | .html_url + ((.steps[]
+ | select(.name == env.TARGET_STEP)
+ | "#step:\(.number):0") // "")'
+ )
+ )"
+ # TODO remove this
+ # print for debugging
+ echo "$(jq -nc '{
+ state: env.STATE,
+ context: env.CONTEXT,
+ description: env.DESCRIPTION,
+ target_url: env.TARGET_URL}')"
+ # update status
+ curl -sS -H "authorization: token ${{secrets.GITHUB_TOKEN}}" \
+ -X POST \
+ "$GITHUB_API_URL/repos/$GITHUB_REPOSITORY/statuses/`
+ `${{github.event.workflow_run.head_sha}}" \
+ -d "$(jq -nc '{
+ state: env.STATE,
+ context: env.CONTEXT,
+ description: env.DESCRIPTION,
+ target_url: env.TARGET_URL}')"
+
+ #if jq -er '.target_url' $s
+ #then
+ # export TARGET_URL="$(jq -er '.target_url' $s)"
+ #elif jq -er '.target_job' $s
+ #then
+ #
+ #fi
+
+ done
+
+
+
+
+# - id: status
+# run: |
+# echo "::set-output name=description::$(cat statuses/x86_64.txt | tr '\n' ' ')"
+# - uses: octokit/request-action@v2.x
+# with:
+# route: POST /repos/{repo}/status/{sha}
+# repo: ${{github.repository}}
+# sha: ${{github.event.status.sha}}
+# context: ${{github.event.status.context}}
+# state: ${{github.event.status.state}}
+# description: ${{steps.status.outputs.description}}
+# target_url: ${{github.event.status.target_url}}
+#
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
new file mode 100644
index 0000000..8f36493
--- /dev/null
+++ b/.github/workflows/test.yml
@@ -0,0 +1,238 @@
+name: test
+on: [push, pull_request]
+
+env:
+ CFLAGS: -Werror
+ MAKEFLAGS: -j
+
+jobs:
+ # run tests
+ test:
+ runs-on: ubuntu-latest
+ strategy:
+ fail-fast: false
+ matrix:
+ arch: [x86_64, thumb, mips, powerpc]
+
+ steps:
+ - uses: actions/checkout@v2
+ - name: install
+ run: |
+ # need toml, also pip3 isn't installed by default?
+ sudo apt-get update
+ sudo apt-get install python3 python3-pip
+ sudo pip3 install toml
+ mkdir status
+ # cross-compile with ARM Thumb (32-bit, little-endian)
+ - name: install-thumb
+ if: matrix.arch == 'thumb'
+ run: |
+ sudo apt-get install \
+ gcc-arm-linux-gnueabi \
+ libc6-dev-armel-cross \
+ qemu-user
+ echo "CC=arm-linux-gnueabi-gcc -mthumb --static" >> $GITHUB_ENV
+ echo "EXEC=qemu-arm" >> $GITHUB_ENV
+ arm-linux-gnueabi-gcc --version
+ qemu-arm -version
+ # cross-compile with MIPS (32-bit, big-endian)
+ - name: install-mips
+ if: matrix.arch == 'mips'
+ run: |
+ sudo apt-get install \
+ gcc-mips-linux-gnu \
+ libc6-dev-mips-cross \
+ qemu-user
+ echo "CC=mips-linux-gnu-gcc --static" >> $GITHUB_ENV
+ echo "EXEC=qemu-mips" >> $GITHUB_ENV
+ mips-linux-gnu-gcc --version
+ qemu-mips -version
+ # cross-compile with PowerPC (32-bit, big-endian)
+ - name: install-powerpc
+ if: matrix.arch == 'powerpc'
+ run: |
+ sudo apt-get install \
+ gcc-powerpc-linux-gnu \
+ libc6-dev-powerpc-cross \
+ qemu-user
+ echo "CC=powerpc-linux-gnu-gcc --static" >> $GITHUB_ENV
+ echo "EXEC=qemu-ppc" >> $GITHUB_ENV
+ powerpc-linux-gnu-gcc --version
+ qemu-ppc -version
+ # test configurations
+ # make sure example can at least compile
+ - name: test-example
+ run: |
+ sed -n '/``` c/,/```/{/```/d; p}' README.md > test.c && \
+ make all CFLAGS+=" \
+ -Duser_provided_block_device_read=NULL \
+ -Duser_provided_block_device_prog=NULL \
+ -Duser_provided_block_device_erase=NULL \
+ -Duser_provided_block_device_sync=NULL \
+ -include stdio.h"
+# # normal+reentrant tests
+# - name: test-default
+# run: make test SCRIPTFLAGS+="-nrk"
+# # NOR flash: read/prog = 1 block = 4KiB
+# - name: test-nor
+# run: make test SCRIPTFLAGS+="-nrk
+# -DLFS_READ_SIZE=1 -DLFS_BLOCK_SIZE=4096"
+# # SD/eMMC: read/prog = 512 block = 512
+# - name: test-emmc
+# run: make test SCRIPTFLAGS+="-nrk
+# -DLFS_READ_SIZE=512 -DLFS_BLOCK_SIZE=512"
+# # NAND flash: read/prog = 4KiB block = 32KiB
+# - name: test-nand
+# run: make test SCRIPTFLAGS+="-nrk
+# -DLFS_READ_SIZE=4096 -DLFS_BLOCK_SIZE=\(32*1024\)"
+# # other extreme geometries that are useful for various corner cases
+# - name: test-no-intrinsics
+# run: make test SCRIPTFLAGS+="-nrk
+# -DLFS_NO_INTRINSICS"
+# - name: test-byte-writes
+# run: make test SCRIPTFLAGS+="-nrk
+# -DLFS_READ_SIZE=1 -DLFS_CACHE_SIZE=1"
+# - name: test-block-cycles
+# run: make test SCRIPTFLAGS+="-nrk
+# -DLFS_BLOCK_CYCLES=1"
+# - name: test-odd-block-count
+# run: make test SCRIPTFLAGS+="-nrk
+# -DLFS_BLOCK_COUNT=1023 -DLFS_LOOKAHEAD_SIZE=256"
+# - name: test-odd-block-size
+# run: make test SCRIPTFLAGS+="-nrk
+# -DLFS_READ_SIZE=11 -DLFS_BLOCK_SIZE=704"
+
+ # update results
+ - uses: actions/checkout@v2
+ if: github.ref != 'refs/heads/master'
+ continue-on-error: true
+ with:
+ ref: master
+ path: master
+
+ - name: results-code
+ continue-on-error: true
+ run: |
+ export OBJ="$(ls lfs*.c | sed 's/\.c/\.o/' | tr '\n' ' ')"
+ export CFLAGS+=" \
+ -DLFS_NO_ASSERT \
+ -DLFS_NO_DEBUG \
+ -DLFS_NO_WARN \
+ -DLFS_NO_ERROR"
+ if [ -d master ]
+ then
+ make -C master clean code OBJ="$OBJ" \
+ SCRIPTFLAGS+="-qo code.csv" \
+ && export SCRIPTFLAGS+="-d master/code.csv"
+ fi
+ make clean code OBJ="$OBJ" \
+ SCRIPTFLAGS+="-o code.csv"
+ - name: results-code-readonly
+ continue-on-error: true
+ run: |
+ export OBJ="$(ls lfs*.c | sed 's/\.c/\.o/' | tr '\n' ' ')"
+ export CFLAGS+=" \
+ -DLFS_NO_ASSERT \
+ -DLFS_NO_DEBUG \
+ -DLFS_NO_WARN \
+ -DLFS_NO_ERROR \
+ -DLFS_READONLY"
+ if [ -d master ]
+ then
+ make -C master clean code OBJ="$OBJ" \
+ SCRIPTFLAGS+="-qo code-readonly.csv" \
+ && export SCRIPTFLAGS+="-d master/code-readonly.csv"
+ fi
+ # TODO remove this OBJ
+ make clean code OBJ="$OBJ" \
+ SCRIPTFLAGS+="-o code-readonly.csv"
+ - name: results-code-threadsafe
+ continue-on-error: true
+ run: |
+ export OBJ="$(ls lfs*.c | sed 's/\.c/\.o/' | tr '\n' ' ')"
+ export CFLAGS+=" \
+ -DLFS_NO_ASSERT \
+ -DLFS_NO_DEBUG \
+ -DLFS_NO_WARN \
+ -DLFS_NO_ERROR \
+ -DLFS_THREADSAFE"
+ if [ -d master ]
+ then
+ make -C master clean code OBJ="$OBJ" \
+ SCRIPTFLAGS+="-qo code-threadsafe.csv" \
+ && export SCRIPTFLAGS+="-d master/code-threadsafe.csv"
+ fi
+ make clean code OBJ="$OBJ" \
+ SCRIPTFLAGS+="-o code-threadsafe.csv"
+ - name: results-code-migrate
+ continue-on-error: true
+ run: |
+ export OBJ="$(ls lfs*.c | sed 's/\.c/\.o/' | tr '\n' ' ')"
+ export CFLAGS+=" \
+ -DLFS_NO_ASSERT \
+ -DLFS_NO_DEBUG \
+ -DLFS_NO_WARN \
+ -DLFS_NO_ERROR \
+ -DLFS_MIGRATE"
+ if [ -d master ]
+ then
+ make -C master clean code OBJ="$OBJ" \
+ SCRIPTFLAGS+="-qo code-migrate.csv" \
+ && export SCRIPTFLAGS+="-d master/code-migrate.csv"
+ fi
+ make clean code OBJ="$OBJ" \
+ SCRIPTFLAGS+="-o code-migrate.csv"
+ # limit reporting to Thumb, otherwise there would be too many numbers
+ # flying around for the results to be easily readable
+ - name: collect-status
+ continue-on-error: true
+ if: matrix.arch == 'thumb'
+ run: |
+ mkdir -p status
+ shopt -s nullglob
+ for f in code*.csv
+ do
+ export STEP="results-code$(
+ echo $f | sed -n 's/code-\(.*\).csv/-\1/p')"
+ export CONTEXT="results / code$(
+ echo $f | sed -n 's/code-\(.*\).csv/ (\1)/p')"
+ export DESCRIPTION="Code size is $(
+ ./scripts/code.py -i $f -S $(
+ [ -e master/$f ] && echo "-d master/$f"))"
+ jq -nc '{
+ state: "success",
+ context: env.CONTEXT,
+ description: env.DESCRIPTION,
+ target_job: "test (${{matrix.arch}})",
+ target_step: env.STEP}' \
+ > status/code$(echo $f | sed -n 's/code-\(.*\).csv/-\1/p').json
+ done
+ - name: upload-status
+ continue-on-error: true
+ if: matrix.arch == 'thumb'
+ uses: actions/upload-artifact@v2
+ with:
+ name: status
+ path: status
+ retention-days: 1
+
+ # run under Valgrind to check for memory errors
+ valgrind:
+ runs-on: ubuntu-latest
+
+ steps:
+ - uses: actions/checkout@v2
+ - name: install
+ run: |
+ # need toml, also pip3 isn't installed by default?
+ sudo apt-get update
+ sudo apt-get install python3 python3-pip
+ sudo pip3 install toml
+ - name: install-valgrind
+ run: |
+ sudo apt-get update
+ sudo apt-get install valgrind
+ valgrind --version
+# # normal tests, we don't need to test all geometries
+# - name: test-valgrind
+# run: make test SCRIPTFLAGS+="-k --valgrind"
diff --git a/Makefile b/Makefile
index e5107bb..0cf3327 100644
--- a/Makefile
+++ b/Makefile
@@ -29,8 +29,7 @@ override CFLAGS += -std=c99 -Wall -pedantic
override CFLAGS += -Wextra -Wshadow -Wjump-misses-init -Wundef
ifdef VERBOSE
-override TFLAGS += -v
-override SFLAGS += -v
+override SCRIPTFLAGS += -v
endif
@@ -41,14 +40,14 @@ asm: $(ASM)
size: $(OBJ)
$(SIZE) -t $^
-code_size:
- ./scripts/code_size.py $(SFLAGS)
+code:
+ ./scripts/code.py $(SCRIPTFLAGS)
test:
- ./scripts/test.py $(TFLAGS)
+ ./scripts/test.py $(EXEC:%=--exec=%) $(SCRIPTFLAGS)
.SECONDEXPANSION:
test%: tests/test$$(firstword $$(subst \#, ,%)).toml
- ./scripts/test.py $@ $(TFLAGS)
+ ./scripts/test.py $@ $(EXEC:%=--exec=%) $(SCRIPTFLAGS)
-include $(DEP)
diff --git a/lfs.c b/lfs.c
index d7439fe..a6bfbf8 100644
--- a/lfs.c
+++ b/lfs.c
@@ -4723,7 +4723,7 @@ static int lfs_rawmigrate(lfs_t *lfs, const struct lfs_config *cfg) {
lfs1_entry_tole32(&entry1.d);
err = lfs_dir_commit(lfs, &dir2, LFS_MKATTRS(
- {LFS_MKTAG(LFS_TYPE_CREATE, id, 0)},
+ {LFS_MKTAG(LFS_TYPE_CREATE, id, 0), NULL},
{LFS_MKTAG_IF_ELSE(isdir,
LFS_TYPE_DIR, id, entry1.d.nlen,
LFS_TYPE_REG, id, entry1.d.nlen),
@@ -4828,7 +4828,7 @@ static int lfs_rawmigrate(lfs_t *lfs, const struct lfs_config *cfg) {
lfs_superblock_tole32(&superblock);
err = lfs_dir_commit(lfs, &dir2, LFS_MKATTRS(
- {LFS_MKTAG(LFS_TYPE_CREATE, 0, 0)},
+ {LFS_MKTAG(LFS_TYPE_CREATE, 0, 0), NULL},
{LFS_MKTAG(LFS_TYPE_SUPERBLOCK, 0, 8), "littlefs"},
{LFS_MKTAG(LFS_TYPE_INLINESTRUCT, 0, sizeof(superblock)),
&superblock}));
diff --git a/scripts/code_size.py b/scripts/code.py
index da2dee8..46459a5 100755
--- a/scripts/code_size.py
+++ b/scripts/code.py
@@ -40,7 +40,7 @@ $(foreach target,$(SRC),$(eval $(FLATTEN)))
-include %(sizedir)s/*.d
.SECONDARY:
-%%.size: $(foreach t,$(subst /,.,$(SRC:.c=.size)),%%.$t)
+%%.size: $(foreach t,$(subst /,.,$(OBJ:.o=.size)),%%.$t)
cat $^ > $@
"""
CATS = {
diff --git a/scripts/test.py b/scripts/test.py
index e5869c2..0ed2099 100755
--- a/scripts/test.py
+++ b/scripts/test.py
@@ -31,7 +31,7 @@ $(foreach target,$(SRC),$(eval $(FLATTEN)))
-include tests/*.d
.SECONDARY:
-%.test: %.test.o $(foreach f,$(subst /,.,$(SRC:.c=.o)),%.$f)
+%.test: %.test.o $(foreach f,$(subst /,.,$(OBJ)),%.$f)
$(CC) $(CFLAGS) $^ $(LFLAGS) -o $@
"""
GLOBALS = """
@@ -771,7 +771,7 @@ if __name__ == "__main__":
help="Run reentrant tests with simulated power-loss.")
parser.add_argument('-V', '--valgrind', action='store_true',
help="Run non-leaky tests under valgrind to check for memory leaks.")
- parser.add_argument('-e', '--exec', default=[], type=lambda e: e.split(' '),
+ parser.add_argument('-e', '--exec', default=[], type=lambda e: e.split(),
help="Run tests with another executable prefixed on the command line.")
parser.add_argument('-d', '--disk',
help="Specify a file to use for persistent/reentrant tests.")