Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ repos:
- id: check-added-large-files
args: ["--maxkb=4000"]
- repo: https://github.com/psf/black-pre-commit-mirror
rev: 25.11.0
rev: 26.1.0
hooks:
- id: black
- repo: https://github.com/pycqa/isort
Expand All @@ -33,7 +33,7 @@ repos:
# additional_dependencies: [cpplint==1.6.1]
# types_or: [c++]
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.14.5
rev: v0.14.14
hooks:
- id: ruff
args: [ "--fix", "--config", "ruff.toml" ]
1 change: 1 addition & 0 deletions doc/sphinx_util.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
# -*- coding: utf-8 -*-
"""Helper utility function for customization."""

import os
import subprocess
import sys
Expand Down
1 change: 1 addition & 0 deletions tracker/dmlc_tracker/kubernetes.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@

One needs to make sure kubectl-able.
"""

from __future__ import absolute_import

import yaml
Expand Down
3 changes: 2 additions & 1 deletion tracker/dmlc_tracker/launcher.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
#!/usr/bin/env python3
# pylint: disable=invalid-name
"""The container launcher script that launches DMLC with the right env variable."""

from __future__ import absolute_import

import glob
Expand Down Expand Up @@ -55,7 +56,7 @@ def main():
if hadoop_home:
library_path.append("%s/lib/native" % hdfs_home)
library_path.append("%s/lib" % hdfs_home)
(classpath, _) = subprocess.Popen(
classpath, _ = subprocess.Popen(
"%s/bin/hadoop classpath" % hadoop_home,
stdout=subprocess.PIPE,
shell=True,
Expand Down
1 change: 1 addition & 0 deletions tracker/dmlc_tracker/mesos.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@

One needs to make sure all slave machines are ssh-able.
"""

from __future__ import absolute_import

import json
Expand Down
2 changes: 1 addition & 1 deletion tracker/dmlc_tracker/mpi.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ def get_mpi_env(envs):
return cmd

# decide MPI version.
(out, err) = subprocess.Popen(
out, err = subprocess.Popen(
["mpirun", "--version"], stdout=subprocess.PIPE, stderr=subprocess.PIPE
).communicate()
if b"Open MPI" in out:
Expand Down
3 changes: 2 additions & 1 deletion tracker/dmlc_tracker/opts.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
# pylint: disable=invalid-name
"""Command line options of job submission script."""

import argparse
import os

Expand Down Expand Up @@ -317,7 +318,7 @@ def get_opts(args=None):
type=int,
help=("Number of attempt local tracker can restart slave."),
)
(args, unknown) = parser.parse_known_args(args)
args, unknown = parser.parse_known_args(args)
args.command += unknown

if args.cluster is None:
Expand Down
3 changes: 2 additions & 1 deletion tracker/dmlc_tracker/ssh.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@

One needs to make sure all slave machines are ssh-able.
"""

from __future__ import absolute_import

import logging
Expand Down Expand Up @@ -98,7 +99,7 @@ def run(prog):
# launch jobs
for i in range(nworker + nserver):
pass_envs["DMLC_ROLE"] = "server" if i < nserver else "worker"
(node, port) = hosts[i % len(hosts)]
node, port = hosts[i % len(hosts)]
pass_envs["DMLC_NODE_HOST"] = node
prog = (
get_env(pass_envs)
Expand Down
4 changes: 2 additions & 2 deletions tracker/dmlc_tracker/yarn.py
Original file line number Diff line number Diff line change
Expand Up @@ -55,13 +55,13 @@ def yarn_submit(args, nworker, nserver, pass_env):
), "failed to build dmlc-yarn.jar, try it manually"

# detect Hadoop version
(out, _) = subprocess.Popen(
out, _ = subprocess.Popen(
"%s version" % hadoop_binary, shell=True, stdout=subprocess.PIPE
).communicate()
out = py_str(out).split("\n")[0].split()
assert out[0] == "Hadoop", "cannot parse hadoop version string"
hadoop_version = int(out[1].split(".")[0])
(classpath, _) = subprocess.Popen(
classpath, _ = subprocess.Popen(
"%s classpath" % hadoop_binary, shell=True, stdout=subprocess.PIPE
).communicate()
classpath = py_str(classpath).strip()
Expand Down
Loading