Authors of the data set are:
- Supervisors who actively contributed ideas to the delta between the last and this major distribution release

Order of authors: maintainers are listed first followed by everybody else, both in alphabetical order.
20+
### Automation
22+
1. Install [EasyDataverse](https://github.com/gdcc/easyDataverse)
24+
As of March 2024, a specific branch was required:
26+
```bash
pip install git+https://github.com/gdcc/easyDataverse@flexible-connect
```
30+
The required features are, however, scheduled for release, so installation from PyPI should become possible in the future.
32+
2. Download the previous distribution
34+
```python
# Download data and metadata of the previous distribution release from DaRUS.
from easyDataverse import Dataverse

SERVER_URL = "https://darus.uni-stuttgart.de"

# get the token from DaRUS after logging in; valid for one year
API_TOKEN = "yourToken"

# DOI of preCICE Distribution Version v2211.0
PID = "doi:10.18419/darus-3576"

darus = Dataverse(SERVER_URL, API_TOKEN)
# pull data and metadata; files are written into the subfolder old_files, which is created if needed
dataset = darus.load_dataset(PID, filedir="old_files")

# write metadata into a JSON file for manual editing in the next step
with open("dataset.json", "w") as f:
    f.write(dataset.json())
```
54+
55+ 3. Edit metadata manually
56+
Copy `dataset.json` to `dataset_new.json` and update it manually. Keep the old `dataset_id`.
58+
59+ 4. Download components
60+
```bash
#!/usr/bin/env bash
# Download all components of the preCICE distribution as tarballs into ./new_files.
set -euo pipefail

repo_names=()
is_releases=() # whether a component is a release (true) or a commit (false)
versions=()    # for a release, this is the version number (without "v"); for a commit, this is the hash (of length 7)

# TODO: update all versions; current example reflects distribution v2404.0

# preCICE
repo_names+=("precice"); is_releases+=(true); versions+=("3.1.1")

# Tools
repo_names+=("aste"); is_releases+=(true); versions+=("3.1.0")
repo_names+=("ci-images"); is_releases+=(false); versions+=("b421a49")
repo_names+=("config-visualizer"); is_releases+=(true); versions+=("1.1.3")
repo_names+=("config-visualizer-gui"); is_releases+=(true); versions+=("0.1.0")
repo_names+=("fmi-runner"); is_releases+=(true); versions+=("0.2.1")
repo_names+=("micro-manager"); is_releases+=(true); versions+=("0.4.0")

# Bindings
repo_names+=("fortran-module"); is_releases+=(false); versions+=("dc88c3b")
repo_names+=("PreCICE.jl"); is_releases+=(true); versions+=("3.1.0")
repo_names+=("matlab-bindings"); is_releases+=(true); versions+=("3.1.0")
repo_names+=("python-bindings"); is_releases+=(true); versions+=("3.1.0")
repo_names+=("rust-bindings"); is_releases+=(true); versions+=("3.1.0")

# Adapters
repo_names+=("calculix-adapter"); is_releases+=(true); versions+=("2.20.1")
repo_names+=("code_aster-adapter"); is_releases+=(false); versions+=("b797fcc")
repo_names+=("dealii-adapter"); is_releases+=(false); versions+=("4c6d092")
repo_names+=("dune-adapter"); is_releases+=(false); versions+=("75edcc3")
repo_names+=("dumux-adapter"); is_releases+=(true); versions+=("2.0.0")
repo_names+=("fenics-adapter"); is_releases+=(true); versions+=("2.1.0")
repo_names+=("openfoam-adapter"); is_releases+=(true); versions+=("1.3.0")
repo_names+=("su2-adapter"); is_releases+=(false); versions+=("64d4aff")

# Tutorials
repo_names+=("tutorials"); is_releases+=(true); versions+=("202404.0")

# VM
repo_names+=("vm"); is_releases+=(true); versions+=("202404.0.0")

# Website and documentation
repo_names+=("precice.github.io"); is_releases+=(true); versions+=("202404.0.0")

# Fetch the tagged release tarball of a repository.
# Arguments: $1 - repository name, $2 - version number (without "v")
download_release() {
  local repo_name=$1
  local version=$2
  wget -nv -O "${repo_name}-${version}.tar.gz" \
    "https://github.com/precice/${repo_name}/archive/refs/tags/v${version}.tar.gz"
}

# Fetch a specific commit of a repository and repackage it as a tarball.
# Arguments: $1 - repository name, $2 - commit hash (of length 7)
download_commit() {
  local repo_name=$1
  local hash=$2

  wget -nv "https://github.com/precice/${repo_name}/archive/${hash}.zip"
  unzip -q "${hash}.zip"
  rm "${hash}.zip"
  # GitHub unpacks to <repo>-<full hash>; normalize to the short hash
  mv "${repo_name}-${hash}"* "${repo_name}-${hash}"
  tar -czf "${repo_name}-${hash}.tar.gz" "${repo_name}-${hash}"
  rm -r "${repo_name}-${hash}"
}

# prepare separate folder for new components
rm -rf new_files
mkdir new_files
cd new_files

for ((i = 0; i < ${#repo_names[@]}; i++)); do
  repo_name="${repo_names[$i]}"
  is_release="${is_releases[$i]}"
  version="${versions[$i]}"

  if [[ "$is_release" == "true" ]]; then
    download_release "$repo_name" "$version"
  else
    download_commit "$repo_name" "$version"
  fi
done
```
142+
143+ Once there is a machine-readable distribution, simplify the bash script accordingly.
144+
145+ 5. Upload to DaRUS
146+
```python
# Upload the edited metadata and the freshly downloaded components as a new dataset on DaRUS.
from easyDataverse import Dataverse

SERVER_URL = "https://darus.uni-stuttgart.de"

# get the token from DaRUS after logging in; valid for one year
API_TOKEN = "yourToken"

darus = Dataverse(SERVER_URL, API_TOKEN)

# use a context manager so the file handle is closed after parsing
with open("./dataset_new.json") as f:
    dataset = darus.dataset_from_json(f)

# datasetContactEmail is a field that cannot yet be downloaded; provide it manually
dataset.citation.dataset_contact[0].email = "benjamin.uekermann@ipvs.uni-stuttgart.de"

dataset.add_directory(
    dirpath="./new_files/",
)

# delete the pid to upload as a new dataset; otherwise the existing one is overwritten
dataset.p_id = None
# upload to the Dataverse collection of US3
dataset.upload("ipvs_us3")
```

171+ 6. Review and publish
172+
173+ The upload returns a url, which gives you access to the dataset. Review carefully. Currently, for example, licenses still need manual editing. Once ready, publish (i.e. send to DaRUS team for review).