#!/bin/bash
# Provision CUDA 8.0 on an Ubuntu 16.x GCE instance.
# NOTE(review): the original script ran `sudo su -` here and had the shebang on
# line 3. `sudo su -` only opens an interactive root shell — it does NOT make
# the remaining script lines run as root. Run this script as root instead
# (e.g. `sudo bash setup.sh`), and keep the shebang on line 1.
set -e

echo "Checking for CUDA and installing."
# Install CUDA only if the package is not already present.
if ! dpkg-query -W cuda; then
  # The 16.04 installer works with 16.10.
  curl -O http://developer.download.nvidia.com/compute/cuda/repos/ubuntu1604/x86_64/cuda-repo-ubuntu1604_8.0.61-1_amd64.deb
  dpkg -i ./cuda-repo-ubuntu1604_8.0.61-1_amd64.deb
  apt-get update
  apt-get install -y cuda
fi
# Sanity check (should see all GPUs listed here)
nvidia-smi
# Install Docker CE and nvidia-docker v1.
# (The original had a typo'd second shebang `#/bin/bash` here; a shebang is
# only meaningful on line 1 of a file, so it is removed.)

# Install packages that allow apt to use a repository over HTTPS.
apt-get -y install \
  apt-transport-https ca-certificates curl software-properties-common
# Add Docker's official GPG key.
# NOTE(review): apt-key is deprecated on modern Ubuntu; kept because this
# targets Ubuntu 16.x-era images.
curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo apt-key add -
# Set up the Docker stable repository for this Ubuntu release.
add-apt-repository \
  "deb [arch=amd64] https://download.docker.com/linux/ubuntu \
  $(lsb_release -cs) \
  stable"
# Update the apt package index, then install docker.
apt-get -y update
apt-get -y install docker-ce
# Install nvidia-docker v1 so containers can see the GPUs.
wget https://github.com/NVIDIA/nvidia-docker/releases/download/v1.0.1/nvidia-docker_1.0.1-1_amd64.deb
dpkg -i ./nvidia-docker_1.0.1-1_amd64.deb
# Sanity check (should run without any issues)
nvidia-docker run --rm nvidia/cuda nvidia-smi
# Sudoless docker setup — run these as the regular (non-root) user.
# NOTE(review): the original transcript had bare `exit` lines here ("exit from
# root" / "logout completely") and a `gcloud compute ssh ...` placeholder; as
# script lines, `exit` would terminate the script before the commands below
# ever ran, and the placeholder would fail. They are kept as comments.
sudo usermod -aG docker "$USER"
sudo systemctl restart docker
# Group membership only takes effect on a new login session, so log out
# completely and reconnect before the sanity checks, e.g.:
#   gcloud compute ssh ...   # new login
# Sanity check (should run as a plain user, without sudo):
docker run hello-world
nvidia-docker run --rm nvidia/cuda nvidia-smi
# Setup is done. Let's pull in relevant programs and scripts:
# Clone the analysis repositories under $HOME/repos.
cd "$HOME"
mkdir -p repos   # -p: safe to re-run if the directory already exists
cd repos
# Clone cytokit but keep the historical local directory name "codex",
# which the env file below and the analysis scripts expect.
git clone https://github.com/hammerlab/cytokit.git && mv cytokit codex
git clone https://github.com/hammerlab/cvutils.git
git clone https://github.com/hammerlab/cell-image-analysis.git
# Write the environment file consumed by the cytokit scripts.
# $HOME expands now, at write time, which is intended here.
# NOTE(review): changed `>>` (append) to `>` (overwrite) so re-running this
# script does not accumulate duplicate export lines in ~/cytokit.env.
cat << EOF > ~/cytokit.env
export CODEX_DATA_DIR=$HOME/data
export CODEX_REPO_DIR=$HOME/repos/codex
export CVUTILS_REPO_DIR=$HOME/repos/cvutils
export CODEX_ANALYSIS_REPO_DIR=$HOME/repos/cell-image-analysis
EOF
source ~/cytokit.env
# Fetch pretrained models into the data directory.
mkdir -p "$CODEX_DATA_DIR"
cd "$CODEX_DATA_DIR"
gsutil cp -r gs://musc-codex/models .
# Fetch the example dataset and rename the local copy to "raw".
cd "$CODEX_DATA_DIR"
mkdir -p 20180614_D22_RepA_Tcell_CD4-CD8-DAPI_5by5
cd 20180614_D22_RepA_Tcell_CD4-CD8-DAPI_5by5
gsutil -m cp -r gs://musc-codex/datasets/20180614_D22_RepA_Tcell_CD4-CD8-DAPI_5by5 .
mv 20180614_D22_RepA_Tcell_CD4-CD8-DAPI_5by5 raw
# ---- Notes (from the original gist README; not executable commands) ----
# We now have all we need (scripts/data). Let's run the analysis:
# Some preliminary notes:
# Cytokit on gcloud
# Spin up a machine on gcloud: 2 GPUs Nvidia K80
# Inspiration: https://medium.com/google-cloud/jupyter-tensorflow-nvidia-gpu-docker-google-compute-engine-4a146f085f17
# Setup is done. Let's pull in relevant programs and scripts:
# We now have all we need (scripts/data). Let's run the analysis:
# You can now connect to the notebook running on your gcloud instance using
# its public IP. Once on it, create a new console tab and run the following:
# Should be done in < 30 minutes. You can then `gsutil cp` the output to a
# bucket and turn your gcloud box off. Need to clean this up a bit.