Skip to content

Commit

Permalink
add
Browse files Browse the repository at this point in the history
  • Loading branch information
陈科研 committed Nov 26, 2023
1 parent 4681748 commit db80d6d
Show file tree
Hide file tree
Showing 8 changed files with 55 additions and 14 deletions.
2 changes: 1 addition & 1 deletion README_zh-CN.md
Original file line number Diff line number Diff line change
Expand Up @@ -138,7 +138,7 @@ mim install "mmcv>=2.0.0"
**步骤 4**:安装其他依赖项。

```shell
pip install -U transformers wandb einops pycocotools shapely scipy terminaltables
pip install -U transformers wandb einops pycocotools shapely scipy terminaltables deepspeed
```

</details>
Expand Down
2 changes: 0 additions & 2 deletions configs/rsprompter/rsprompter_anchor-nwpu.py
Original file line number Diff line number Diff line change
Expand Up @@ -124,7 +124,6 @@
]

#### AMP training config

runner_type = 'Runner'
optim_wrapper = dict(
type='AmpOptimWrapper',
Expand All @@ -136,7 +135,6 @@
)

#### DeepSpeed training config

# runner_type = 'FlexibleRunner'
# strategy = dict(
# type='DeepSpeedStrategy',
Expand Down
2 changes: 0 additions & 2 deletions configs/rsprompter/rsprompter_anchor-ssdd.py
Original file line number Diff line number Diff line change
Expand Up @@ -122,7 +122,6 @@
]

#### AMP training config

runner_type = 'Runner'
optim_wrapper = dict(
type='AmpOptimWrapper',
Expand All @@ -134,7 +133,6 @@
)

#### DeepSpeed training config

# runner_type = 'FlexibleRunner'
# strategy = dict(
# type='DeepSpeedStrategy',
Expand Down
2 changes: 0 additions & 2 deletions configs/rsprompter/rsprompter_anchor-whu.py
Original file line number Diff line number Diff line change
Expand Up @@ -121,7 +121,6 @@
]

#### AMP training config

runner_type = 'Runner'
optim_wrapper = dict(
type='AmpOptimWrapper',
Expand All @@ -133,7 +132,6 @@
)

#### DeepSpeed training config

# runner_type = 'FlexibleRunner'
# strategy = dict(
# type='DeepSpeedStrategy',
Expand Down
15 changes: 13 additions & 2 deletions configs/rsprompter/rsprompter_query-nwpu.py
Original file line number Diff line number Diff line change
Expand Up @@ -123,7 +123,7 @@
)
]

# runner_type = 'Runner'
#### DeepSpeed training config
runner_type = 'FlexibleRunner'
strategy = dict(
type='DeepSpeedStrategy',
Expand Down Expand Up @@ -156,4 +156,15 @@
lr=base_lr,
weight_decay=0.05
)
)
)

# #### AMP training config
# runner_type = 'Runner'
# optim_wrapper = dict(
# type='AmpOptimWrapper',
# dtype='float16',
# optimizer=dict(
# type='AdamW',
# lr=base_lr,
# weight_decay=0.05)
# )
15 changes: 13 additions & 2 deletions configs/rsprompter/rsprompter_query-ssdd.py
Original file line number Diff line number Diff line change
Expand Up @@ -124,7 +124,7 @@
)
]

# runner_type = 'Runner'
#### DeepSpeed training config
runner_type = 'FlexibleRunner'
strategy = dict(
type='DeepSpeedStrategy',
Expand Down Expand Up @@ -157,4 +157,15 @@
lr=base_lr,
weight_decay=0.05
)
)
)

# #### AMP training config
# runner_type = 'Runner'
# optim_wrapper = dict(
# type='AmpOptimWrapper',
# dtype='float16',
# optimizer=dict(
# type='AdamW',
# lr=base_lr,
# weight_decay=0.05)
# )
14 changes: 13 additions & 1 deletion configs/rsprompter/rsprompter_query-whu.py
Original file line number Diff line number Diff line change
Expand Up @@ -124,7 +124,7 @@
)
]

# runner_type = 'Runner'
#### DeepSpeed Configs
runner_type = 'FlexibleRunner'
strategy = dict(
type='DeepSpeedStrategy',
Expand Down Expand Up @@ -159,5 +159,17 @@
)
)

# #### AMP training config
# runner_type = 'Runner'
# optim_wrapper = dict(
# type='AmpOptimWrapper',
# dtype='float16',
# optimizer=dict(
# type='AdamW',
# lr=base_lr,
# weight_decay=0.05)
# )




17 changes: 15 additions & 2 deletions configs/rsprompter/samseg-mask2former-ssdd.py
Original file line number Diff line number Diff line change
Expand Up @@ -110,7 +110,7 @@
)
]

# runner_type = 'Runner'
#### DeepSpeed training config
runner_type = 'FlexibleRunner'
strategy = dict(
type='DeepSpeedStrategy',
Expand Down Expand Up @@ -143,4 +143,17 @@
lr=base_lr,
weight_decay=0.05
)
)
)

#### AMP training config
# runner_type = 'Runner'
# optim_wrapper = dict(
# type='AmpOptimWrapper',
# dtype='float16',
# optimizer=dict(
# type='AdamW',
# lr=base_lr,
# weight_decay=0.05,
# eps=1e-8,
# betas=(0.9, 0.999))
# )

0 comments on commit db80d6d

Please sign in to comment.