
Commit 0290dd2
Merge remote-tracking branch 'origin/master' into HEAD
adamnovak committed Oct 29, 2024
2 parents 638ec66 + 26db084
Showing 17 changed files with 179 additions and 201 deletions.
8 changes: 4 additions & 4 deletions .github/workflows/testmac.yml
@@ -15,7 +15,7 @@ on:
jobs:
testmac:
name: Test on Mac
runs-on: macos-12
runs-on: macos-15

steps:
- name: Use cache
@@ -26,14 +26,14 @@ jobs:
lib
include
bin
key: ${{ runner.os }}-12-${{ github.ref }}
key: ${{ runner.os }}-15-${{ github.ref }}
# Restore keys are a "list", but really only a multiline string is
# accepted. Also we match by prefix. And the most recent cache is
# used, not the most specific.
# See: https://docs.github.com/en/actions/guides/caching-dependencies-to-speed-up-workflows#matching-a-cache-key
restore-keys: |
${{ runner.os }}-12-${{ github.base_ref }}
${{ runner.os }}-12
${{ runner.os }}-15-${{ github.base_ref }}
${{ runner.os }}-15
- name: Checkout code without submodules
uses: actions/checkout@v2
4 changes: 2 additions & 2 deletions .gitlab-ci.yml
@@ -86,8 +86,6 @@ local-build-test-job:
script:
- THREADS=8
- nvm version
- python3 ./configure.py
- source ./source_me.sh
- make get-deps
- make -j${THREADS}
- echo Testing
@@ -96,6 +94,8 @@ local-build-test-job:
- make test
- make static -j${THREADS}
# Also test as a backend for the tube map
# Tube map expects vg on PATH
- export PATH="$(pwd)/bin:${PATH}"
- git clone https://github.com/vgteam/sequenceTubeMap.git
- cd sequenceTubeMap
# Tube map expects local IPv6 but Kubernetes won't let us have it
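
The PATH export added above matters because sequenceTubeMap invokes `vg` by name rather than by absolute path, so the freshly built binary has to be discoverable through the environment. A minimal sketch of the same pattern outside CI, assuming the build put the binary at ./bin/vg as vg's Makefile does:

    # Make the locally built vg take precedence over any system-wide copy.
    export PATH="$(pwd)/bin:${PATH}"

    # Confirm that child processes will now resolve "vg" to the local build.
    command -v vg   # should print .../bin/vg
    vg version      # reports the version of whichever vg was resolved
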
7 changes: 3 additions & 4 deletions Dockerfile
@@ -48,7 +48,6 @@ RUN apt-get -qq -y update && apt-get -qq -y upgrade && apt-get -qq -y install \
###DEPS_END###

# Prepare to build submodule dependencies
COPY source_me.sh /vg/source_me.sh
COPY deps /vg/deps
# To increase portability of the docker image, when building for amd64, set the
# target CPU architecture to Nehalem (2008) rather than auto-detecting the
@@ -59,17 +58,17 @@ RUN if [ -z "${TARGETARCH}" ] || [ "${TARGETARCH}" = "amd64" ] ; then sed -i s/m
RUN find . -name CMakeCache.txt | xargs rm -f
# Build the dependencies
COPY Makefile /vg/Makefile
RUN . ./source_me.sh && CXXFLAGS="$(if [ -z "${TARGETARCH}" ] || [ "${TARGETARCH}" = "amd64" ] ; then echo " -march=nehalem "; fi)" CFLAGS="$(if [ -z "${TARGETARCH}" ] || [ "${TARGETARCH}" = "amd64" ] ; then echo " -march=nehalem "; fi)" make -j $((THREADS < $(nproc) ? THREADS : $(nproc))) deps
RUN CXXFLAGS="$(if [ -z "${TARGETARCH}" ] || [ "${TARGETARCH}" = "amd64" ] ; then echo " -march=nehalem "; fi)" CFLAGS="$(if [ -z "${TARGETARCH}" ] || [ "${TARGETARCH}" = "amd64" ] ; then echo " -march=nehalem "; fi)" make -j $((THREADS < $(nproc) ? THREADS : $(nproc))) deps

# Bring in the sources, which we need in order to build.
COPY src /vg/src

# Build all the object files for vg, but don't link.
# Also pass the arch here
RUN . ./source_me.sh && CXXFLAGS="$(if [ -z "${TARGETARCH}" ] || [ "${TARGETARCH}" = "amd64" ] ; then echo " -march=nehalem "; fi)" make -j $((THREADS < $(nproc) ? THREADS : $(nproc))) objs
RUN CXXFLAGS="$(if [ -z "${TARGETARCH}" ] || [ "${TARGETARCH}" = "amd64" ] ; then echo " -march=nehalem "; fi)" make -j $((THREADS < $(nproc) ? THREADS : $(nproc))) objs

# Do the final build and link, knowing the version. Trim down the resulting binary but make sure to include enough debug info for profiling.
RUN . ./source_me.sh && CXXFLAGS="$(if [ -z "${TARGETARCH}" ] || [ "${TARGETARCH}" = "amd64" ] ; then echo " -march=nehalem "; fi)" make -j $((THREADS < $(nproc) ? THREADS : $(nproc))) static && strip -d bin/vg
RUN CXXFLAGS="$(if [ -z "${TARGETARCH}" ] || [ "${TARGETARCH}" = "amd64" ] ; then echo " -march=nehalem "; fi)" make -j $((THREADS < $(nproc) ? THREADS : $(nproc))) static && strip -d bin/vg

# Ship the scripts
COPY scripts /vg/scripts
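
The `RUN` lines above each combine two shell idioms: C-style arithmetic with a ternary operator, which caps build parallelism at the smaller of `THREADS` and the machine's core count, and a conditional `-march=nehalem` so amd64 images run on any CPU from 2008 onward rather than only on CPUs like the build host's. A standalone sketch of both idioms (variable names follow the Dockerfile; the script itself is illustrative):

    #!/usr/bin/env bash
    # Cap parallel make jobs at min(THREADS, core count); shell arithmetic
    # expansion supports C's ternary operator.
    THREADS=8
    JOBS=$(( THREADS < $(nproc) ? THREADS : $(nproc) ))

    # Pin the target microarchitecture only for amd64 builds (an unset
    # TARGETARCH means a native build); otherwise let the compiler choose.
    ARCH_FLAGS=""
    if [ -z "${TARGETARCH}" ] || [ "${TARGETARCH}" = "amd64" ]; then
        ARCH_FLAGS="-march=nehalem"
    fi

    CFLAGS="${ARCH_FLAGS}" CXXFLAGS="${ARCH_FLAGS}" make -j "${JOBS}"
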
147 changes: 74 additions & 73 deletions Makefile

Large diffs are not rendered by default.

4 changes: 2 additions & 2 deletions README.md
@@ -101,7 +101,7 @@ Note that a 64-bit OS is required. Ubuntu 20.04 should work.

#### Linux: Build

When you are ready, build with `. ./source_me.sh && make`. You can use `make -j16` to run 16 build threads at a time, which greatly accelerates the process. If you have more CPU cores, you can use higher numbers.
When you are ready, build with `make`. You can use `make -j16` to run 16 build threads at a time, which greatly accelerates the process. If you have more CPU cores, you can use higher numbers.

Note that vg can take anywhere from 10 minutes to more than an hour to compile depending on your machine and the number of threads used.

@@ -161,7 +161,7 @@ Homebrew provides another package management solution for OSX, and may be prefer

With dependencies installed, VG can now be built:

. ./source_me.sh && make
make

As with Linux, you can add `-j16` or other numbers at the end to run multiple build tasks at once, if your computer can handle them.

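
To size the job count from the README's `-j16` advice to the machine automatically, the core count can be queried at invocation time (a sketch; `nproc` ships with Linux coreutils, and `sysctl -n hw.ncpu` is the usual macOS equivalent):

    # Linux: one build job per available core
    make -j"$(nproc)"

    # macOS: query the core count through sysctl instead
    make -j"$(sysctl -n hw.ncpu)"
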
44 changes: 0 additions & 44 deletions configure.py

This file was deleted.

2 changes: 1 addition & 1 deletion deps/sdsl-lite
Submodule sdsl-lite updated 1 file
+10 −1 install.sh
2 changes: 1 addition & 1 deletion doc/wiki
Submodule wiki updated from f70ea3 to f28a1e
2 changes: 1 addition & 1 deletion scripts/setup-server
@@ -36,7 +36,7 @@ sudo update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-4.9 100 --slave
git clone --recursive https://github.com/vgteam/vg.git

# build vg
cd vg && source ./source_me.sh && make -j 32 static && sudo cp bin/vg /usr/local/bin/
cd vg && make -j 32 static && sudo cp bin/vg /usr/local/bin/
sudo cp scripts/vg_sim_pos_compare.py /usr/local/bin/
cd ~

27 changes: 4 additions & 23 deletions source_me.sh
mode change 100755 → 100644 (no longer executable)
@@ -1,23 +1,4 @@
export LIBRARY_PATH=`pwd`/lib:$LIBRARY_PATH
export LD_LIBRARY_PATH=`pwd`/lib:$LD_LIBRARY_PATH
export DYLD_LIBRARY_PATH=`pwd`/lib:$DYLD_LIBRARY_PATH
export LD_INCLUDE_PATH=`pwd`/include:$LD_INCLUDE_PATH
# Setting include directories via C_INCLUDE_PATH/CPLUS_INCLUDE_PATH will
# automatically get them demoted to the end of the search list even if a -I
# option is passed to try and bump them up earlier, before other -I options.
# We leave the Makefile in charge of finding all the include directories.
export CFLAGS="-I $(pwd)/include ${CFLAGS}"
export CXXFLAGS="-I $(pwd)/include -I$(pwd)/include/dynamic ${CXXFLAGS}"
export PATH=`pwd`/bin:`pwd`/scripts:"$PATH"
export CC=$(which gcc)
export CXX=$(which g++)

#
# disable until file arguments work as in normal bash :(
#
# add bash autocompletion
#if test -n "$BASH_VERSION"
#then
#
# . ./autocomp.bash
#fi
# We used to have a script here to set up all the include and library search
# paths for the vg build. But now the Makefile knows how to do it all for the
# build, and the vg binary knows where to look for its dynamic libraries.
echo 1>&2 "Sourcing source_me.sh is no longer necessary"
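
The comment deleted above records a real compiler gotcha: directories listed in C_INCLUDE_PATH or CPLUS_INCLUDE_PATH are treated like -isystem paths and searched only after every -I directory, so they cannot force a vendored include/ directory to win over other headers; that is why the script passed -I through CFLAGS/CXXFLAGS instead. One way to inspect the search order the compiler will actually use (a sketch; works with both gcc and clang):

    # Preprocess an empty translation unit verbosely and print just the
    # include search order; -I directories are listed before anything
    # contributed by CPLUS_INCLUDE_PATH.
    echo | g++ -x c++ -E -v - 2>&1 | sed -n '/search starts here/,/End of search list/p'
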
7 changes: 4 additions & 3 deletions src/deconstructor.cpp
@@ -1168,9 +1168,10 @@ string Deconstructor::get_vcf_header() {
}

if (sample_to_haps.empty()) {
cerr << "Error [vg deconstruct]: No paths found for alt alleles in the graph. Note that "
<< "exhaustive path-free traversal finding is no longer supported, and vg deconstruct "
<< "now only works on embedded paths and GBWT threads." << endl;
cerr << "Error [vg deconstruct]: No paths other than selected reference(s) found in the graph, "
<< "so no alt alleles can be generated. Note that exhaustive path-free traversal finding "
<< "is no longer supported, and vg deconstruct now only works on embedded paths and GBWT "
<< "threads." << endl;
exit(1);
}

78 changes: 44 additions & 34 deletions src/subcommand/chunk_main.cpp
@@ -349,6 +349,36 @@ int main_chunk(int argc, char** argv) {
bool chunk_gam = !gam_files.empty() && gam_split_size == 0;
bool chunk_graph = gam_and_graph || (!chunk_gam && gam_split_size == 0);

// parse the regions into a list before loading the graph, if we're
// specifying regions by path name.
vector<Region> regions;
if (!region_strings.empty()) {
for (auto& region_string : region_strings) {
Region region;
parse_region(region_string, region);
regions.push_back(region);
}
}
if (!path_list_file.empty()) {
ifstream pr_stream(path_list_file.c_str());
if (!pr_stream) {
cerr << "error:[vg chunk] unable to open path regions: " << path_list_file << endl;
return 1;
}
while (pr_stream) {
string buf;
std::getline(pr_stream, buf);
if (!buf.empty()) {
Region region;
parse_region(buf, region);
regions.push_back(region);
}
}
}
if (!in_bed_file.empty()) {
parse_bed_regions(in_bed_file, regions);
}

// Load the snarls
unique_ptr<SnarlManager> snarl_manager;
if (!snarl_filename.empty()) {
@@ -377,9 +407,17 @@ int main_chunk(int argc, char** argv) {
return 1;
}
in.close();

// To support the regions we were asked for, we might need to ensure
// the paths they are on are actually indexed for reference style
// offset lookups.
std::unordered_set<std::string> ensure_indexed;
for (auto& region : regions) {
ensure_indexed.insert(region.seq);
}

path_handle_graph = vg::io::VPKG::load_one<PathHandleGraph>(xg_file);
graph = overlay_helper.apply(path_handle_graph.get());
graph = overlay_helper.apply(path_handle_graph.get(), ensure_indexed);
in.close();
}

@@ -463,35 +501,7 @@ int main_chunk(int argc, char** argv) {
// (instead of an index)
unordered_map<nid_t, int32_t> node_to_component;

// parse the regions into a list
vector<Region> regions;
if (!region_strings.empty()) {
for (auto& region_string : region_strings) {
Region region;
parse_region(region_string, region);
regions.push_back(region);
}
}
else if (!path_list_file.empty()) {
ifstream pr_stream(path_list_file.c_str());
if (!pr_stream) {
cerr << "error:[vg chunk] unable to open path regions: " << path_list_file << endl;
return 1;
}
while (pr_stream) {
string buf;
std::getline(pr_stream, buf);
if (!buf.empty()) {
Region region;
parse_region(buf, region);
regions.push_back(region);
}
}
}
else if (!in_bed_file.empty()) {
parse_bed_regions(in_bed_file, regions);
}
else if (id_range) {
if (id_range) {
if (n_chunks) {
// Determine the ranges from the source graph itself.
// how many nodes per range?
@@ -556,9 +566,9 @@ int main_chunk(int argc, char** argv) {
delete range_stream;
}
}
else if (graph != nullptr && (!components || path_components)) {
// every path
graph->for_each_path_handle([&](path_handle_t path_handle) {
if (graph != nullptr && path_components) {
// every reference or generic path (guaranteed to be reference indexed)
graph->for_each_path_matching({PathSense::REFERENCE, PathSense::GENERIC}, {}, {}, [&](path_handle_t path_handle) {
Region region;
region.seq = graph->get_path_name(path_handle);
if (!Paths::is_alt(region.seq)) {
@@ -596,7 +606,7 @@ int main_chunk(int argc, char** argv) {
if (!id_range) {
for (auto& region : regions) {
if (!graph->has_path(region.seq)) {
cerr << "error[vg chunk]: input path " << region.seq << " not found in xg index" << endl;
cerr << "error[vg chunk]: input path " << region.seq << " not found in graph" << endl;
return 1;
}
region.start = max((int64_t)0, region.start);
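
The net effect of the reordering above: path-based regions are now parsed before the graph is loaded, the overlay can be told up front which paths need reference-style offset lookups, and the region, path-list, BED, and id-range modes are independent ifs rather than a mutually exclusive else-if chain. From the command line this is the path-region mode of vg chunk; a usage sketch (the -x and -p flags follow vg's wiki examples, but treat them as assumptions and check vg chunk --help):

    # Extract the subgraph covering a region of a reference path.
    # graph.xg and the coordinates are illustrative.
    vg chunk -x graph.xg -p chr20:1000000-1100000 > chunk.vg
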
(remaining file diffs not rendered)

1 comment on commit 0290dd2

@adamnovak (Member, Author)


vg CI tests complete for branch glenn. View the full report here.

16 tests passed, 0 tests failed and 0 tests skipped in 17717 seconds
