Master advanced Linux development tools, Python environments, Docker containerization, and professional AI deployment workflows on Linux systems.
# Download Miniforge installer
wget https://github.com/conda-forge/miniforge/releases/latest/download/Miniforge3-Linux-x86_64.sh
# Make executable and install
chmod +x Miniforge3-Linux-x86_64.sh
./Miniforge3-Linux-x86_64.sh
# Follow installation prompts, then restart shell
source ~/.bashrc
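If the prompts completed without errors, a quick check confirms that conda is on your PATH and shows where the environments live (the exact paths depend on the install prefix you accepted):
# Confirm conda is available and inspect the install location
conda --version
conda info --envs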
# Create conda environment for AI development
conda create -n ai-dev python=3.11 -y
conda activate ai-dev
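Before installing anything, it is worth confirming that the environment's interpreter is the one actually in use; the path shown below assumes the default Miniforge prefix in your home directory.
# Confirm the ai-dev interpreter is active
python --version    # should report Python 3.11.x
which python        # typically ~/miniforge3/envs/ai-dev/bin/python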
# Install essential AI packages
conda install -y \
    numpy \
    pandas \
    matplotlib \
    seaborn \
    scikit-learn \
    jupyter \
    jupyterlab \
    ipython
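A one-line import check is a cheap way to confirm the scientific stack installed cleanly; this is just a sanity-check sketch using the import names that correspond to the conda packages above.
# Sanity-check that the core packages import and report their versions
python -c "import numpy, pandas, sklearn, matplotlib; print(numpy.__version__, pandas.__version__)"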
# Install PyTorch (CPU version for now)
conda install pytorch torchvision torchaudio cpuonly -c pytorch
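To verify the CPU build works end to end, you can run a small tensor operation; any recent PyTorch release supports this snippet.
# Verify PyTorch loads and can run a basic matrix multiplication on the CPU
python -c "import torch; print(torch.__version__); print(torch.rand(2, 2) @ torch.rand(2, 2))"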
# Install additional packages with pip
pip install \
    transformers \
    openai \
    anthropic \
    langchain \
    streamlit \
    fastapi \
    uvicorn \
    requests \
    beautifulsoup4 \
    selenium
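Once the pip installs finish, an import check plus an environment snapshot makes the setup easier to reproduce later; the requirements.txt filename here is just a convention, not something the tooling requires.
# Confirm a few key libraries import, then freeze the environment for later reuse
python -c "import transformers, fastapi, streamlit; print(transformers.__version__)"
pip freeze > requirements.txt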
# Check for NVIDIA GPU
nvidia-smi
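If nvidia-smi prints a table with your GPU model and driver version, the driver is visible from Linux and you can continue with the CUDA steps; if it fails, stay on the CPU build. A small guard like the one below (a sketch, not required) makes that decision explicit in a script:
# Only proceed with the CUDA toolkit install when a GPU is actually visible
if nvidia-smi > /dev/null 2>&1; then
    echo "GPU detected - continue with the CUDA setup below"
else
    echo "No GPU detected - skip to the CPU-only workflow"
fi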
# Install CUDA toolkit (if GPU available)
wget https://developer.download.nvidia.com/compute/cuda/repos/wsl-ubuntu/x86_64/cuda-wsl-ubuntu.pin
sudo mv cuda-wsl-ubuntu.pin /etc/apt/preferences.d/cuda-repository-pin-600
sudo apt-key adv --fetch-keys https://developer.download.nvidia.com/compute/cuda/repos/wsl-ubuntu/x86_64/3bf863cc.pub
sudo add-apt-repository "deb https://developer.download.nvidia.com/compute/cuda/repos/wsl-ubuntu/x86_64/ /"
sudo apt update
sudo apt install -y cuda-toolkit-12-2
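The Ubuntu packages place the toolkit under /usr/local/cuda-12.2 but do not update your shell PATH, so nvcc will not resolve until you export it; the lines below assume that default layout and can be appended to ~/.bashrc to persist across sessions.
# Make the CUDA 12.2 toolchain visible in this shell, then confirm the compiler version
export PATH=/usr/local/cuda-12.2/bin:$PATH
export LD_LIBRARY_PATH=/usr/local/cuda-12.2/lib64:${LD_LIBRARY_PATH:-}
nvcc --version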
# Install PyTorch with CUDA support
conda install pytorch torchvision torchaudio pytorch-cuda=12.1 -c pytorch -c nvidia
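Finally, confirm that the CUDA-enabled build can actually see the GPU from inside the ai-dev environment; torch.cuda.is_available() returning True is the signal that the driver, toolkit, and PyTorch packages line up.
# Check that PyTorch detects the GPU and report its name
python -c "import torch; print(torch.cuda.is_available()); print(torch.cuda.get_device_name(0) if torch.cuda.is_available() else 'no GPU visible')"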