Commit

fix distillation api examples (PaddlePaddle#50)
baiyfbupt authored Jan 21, 2020
1 parent 78c8e4e commit eb48cb6
Showing 3 changed files with 10 additions and 231 deletions.
16 changes: 8 additions & 8 deletions docs/docs/api/single_distiller_api.md
@@ -76,10 +76,10 @@ with fluid.program_guard(teacher_program):
 data_name_map = {'y':'x'}
 USE_GPU = False
 place = fluid.CUDAPlace(0) if USE_GPU else fluid.CPUPlace()
-merge(teacher_program, student_program, data_name_map, place)
+dist.merge(teacher_program, student_program, data_name_map, place)
 with fluid.program_guard(student_program):
     distillation_loss = dist.fsp_loss('teacher_t1.tmp_1', 'teacher_t2.tmp_1',
-                                      's1.tmp_1', 's2.tmp_1', main_program)
+                                      's1.tmp_1', 's2.tmp_1', student_program)
 ```
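The substance of this hunk is that after `dist.merge` the teacher variables live inside `student_program`, so `fsp_loss` must be pointed at `student_program` (the `main_program` in the old text was never defined in the example). Below is a minimal sketch of the corrected pattern; the two small conv networks and the `t1`/`t2`/`s1`/`s2` layer names are illustrative assumptions chosen only so that the variable names match the documented call, and parameter initialization and training are omitted as in the docs example.

```python
import paddle.fluid as fluid
import paddleslim.dist as dist

# Student network; with name='s1'/'s2' the conv outputs are registered
# as 's1.tmp_1' and 's2.tmp_1' in student_program.
student_program = fluid.Program()
with fluid.program_guard(student_program):
    x = fluid.layers.data(name='x', shape=[1, 28, 28])
    s1 = fluid.layers.conv2d(x, 32, 1, name='s1')
    s2 = fluid.layers.conv2d(s1, 64, 3, padding=1, name='s2')

# Teacher network; after merge() its variables are copied into
# student_program under the default 'teacher_' prefix, so 't1'/'t2'
# become 'teacher_t1.tmp_1' / 'teacher_t2.tmp_1'.
teacher_program = fluid.Program()
with fluid.program_guard(teacher_program):
    y = fluid.layers.data(name='y', shape=[1, 28, 28])
    t1 = fluid.layers.conv2d(y, 32, 1, name='t1')
    t2 = fluid.layers.conv2d(t1, 64, 3, padding=1, name='t2')

# Map the teacher's input 'y' onto the student's input 'x' and merge the
# teacher graph into the student program.
data_name_map = {'y': 'x'}
USE_GPU = False
place = fluid.CUDAPlace(0) if USE_GPU else fluid.CPUPlace()
dist.merge(teacher_program, student_program, data_name_map, place)

# Build the FSP distillation loss inside the merged student program.
with fluid.program_guard(student_program):
    distillation_loss = dist.fsp_loss('teacher_t1.tmp_1', 'teacher_t2.tmp_1',
                                      's1.tmp_1', 's2.tmp_1', student_program)
```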


@@ -116,10 +116,10 @@ with fluid.program_guard(teacher_program):
 data_name_map = {'y':'x'}
 USE_GPU = False
 place = fluid.CUDAPlace(0) if USE_GPU else fluid.CPUPlace()
-merge(teacher_program, student_program, data_name_map, place)
+dist.merge(teacher_program, student_program, data_name_map, place)
 with fluid.program_guard(student_program):
     distillation_loss = dist.l2_loss('teacher_t2.tmp_1', 's2.tmp_1',
-                                     main_program)
+                                     student_program)
 ```
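The `l2_loss` hunk is the same correction. A shorter sketch of the corrected call (the single-layer networks are again assumptions, not part of the commit):

```python
import paddle.fluid as fluid
import paddleslim.dist as dist

student_program = fluid.Program()
with fluid.program_guard(student_program):
    x = fluid.layers.data(name='x', shape=[1, 28, 28])
    fluid.layers.conv2d(x, 32, 3, padding=1, name='s2')

teacher_program = fluid.Program()
with fluid.program_guard(teacher_program):
    y = fluid.layers.data(name='y', shape=[1, 28, 28])
    fluid.layers.conv2d(y, 32, 3, padding=1, name='t2')

data_name_map = {'y': 'x'}
place = fluid.CPUPlace()
dist.merge(teacher_program, student_program, data_name_map, place)

with fluid.program_guard(student_program):
    # An L2-type penalty between matching teacher and student feature maps;
    # the program that now holds both sets of variables is student_program.
    distillation_loss = dist.l2_loss('teacher_t2.tmp_1', 's2.tmp_1',
                                     student_program)
```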


@@ -158,10 +158,10 @@ with fluid.program_guard(teacher_program):
 data_name_map = {'y':'x'}
 USE_GPU = False
 place = fluid.CUDAPlace(0) if USE_GPU else fluid.CPUPlace()
-merge(teacher_program, student_program, data_name_map, place)
+dist.merge(teacher_program, student_program, data_name_map, place)
 with fluid.program_guard(student_program):
     distillation_loss = dist.soft_label_loss('teacher_t2.tmp_1',
-                                             's2.tmp_1', main_program, 1., 1.)
+                                             's2.tmp_1', student_program, 1., 1.)
 ```
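For `soft_label_loss`, the two trailing `1., 1.` arguments are temperature values for the teacher and student distributions. A compact sketch of the corrected call, here assuming fc logits (a choice made for this sketch; the docs build small conv networks):

```python
import paddle.fluid as fluid
import paddleslim.dist as dist

student_program = fluid.Program()
with fluid.program_guard(student_program):
    x = fluid.layers.data(name='x', shape=[1, 28, 28])
    fluid.layers.fc(x, size=10, name='s2')   # logits registered as 's2.tmp_1'

teacher_program = fluid.Program()
with fluid.program_guard(teacher_program):
    y = fluid.layers.data(name='y', shape=[1, 28, 28])
    fluid.layers.fc(y, size=10, name='t2')   # becomes 'teacher_t2.tmp_1' after merge

data_name_map = {'y': 'x'}
place = fluid.CPUPlace()
dist.merge(teacher_program, student_program, data_name_map, place)

with fluid.program_guard(student_program):
    # Soft-label (KD) loss between teacher and student outputs, with both
    # temperatures left at 1. as in the docs example.
    distillation_loss = dist.soft_label_loss('teacher_t2.tmp_1', 's2.tmp_1',
                                             student_program, 1., 1.)
```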


@@ -198,14 +198,14 @@ with fluid.program_guard(teacher_program):
 data_name_map = {'y':'x'}
 USE_GPU = False
 place = fluid.CUDAPlace(0) if USE_GPU else fluid.CPUPlace()
-merge(teacher_program, student_program, data_name_map, place)
+dist.merge(teacher_program, student_program, data_name_map, place)
 def adaptation_loss(t_var, s_var):
     teacher_channel = t_var.shape[1]
     s_hint = fluid.layers.conv2d(s_var, teacher_channel, 1)
     hint_loss = fluid.layers.reduce_mean(fluid.layers.square(s_hint - t_var))
     return hint_loss
 with fluid.program_guard(student_program):
-    distillation_loss = dist.loss(main_program, adaptation_loss,
+    distillation_loss = dist.loss(adaptation_loss, student_program,
                                   t_var='teacher_t2.tmp_1', s_var='s2.tmp_1')
 ```
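The last hunk fixes the argument order of the generic `dist.loss` helper: the callable comes first, the merged program second, and the remaining keyword arguments name the variables to hand to the callable (as the doc example implies, `t_var`/`s_var` are passed as names and arrive inside `adaptation_loss` as variables). A sketch under the same assumptions as above (small made-up networks, no training loop):

```python
import paddle.fluid as fluid
import paddleslim.dist as dist

# Student feature map with fewer channels than the teacher's.
student_program = fluid.Program()
with fluid.program_guard(student_program):
    x = fluid.layers.data(name='x', shape=[1, 28, 28])
    fluid.layers.conv2d(x, 16, 3, padding=1, name='s2')

teacher_program = fluid.Program()
with fluid.program_guard(teacher_program):
    y = fluid.layers.data(name='y', shape=[1, 28, 28])
    fluid.layers.conv2d(y, 32, 3, padding=1, name='t2')

data_name_map = {'y': 'x'}
place = fluid.CPUPlace()
dist.merge(teacher_program, student_program, data_name_map, place)

def adaptation_loss(t_var, s_var):
    # A 1x1 conv lifts the student features to the teacher's channel count,
    # then the squared difference is penalized (a FitNets-style hint loss).
    teacher_channel = t_var.shape[1]
    s_hint = fluid.layers.conv2d(s_var, teacher_channel, 1)
    return fluid.layers.reduce_mean(fluid.layers.square(s_hint - t_var))

with fluid.program_guard(student_program):
    distillation_loss = dist.loss(adaptation_loss, student_program,
                                  t_var='teacher_t2.tmp_1', s_var='s2.tmp_1')
```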

2 changes: 2 additions & 0 deletions paddleslim/dist/__init__.py
@@ -11,3 +11,5 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+
+from .single_distiller import merge, fsp_loss, l2_loss, soft_label_loss, loss
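These two added lines are what make the `dist.`-qualified calls in the corrected docs resolvable at the package level. A quick sanity check, assuming PaddleSlim is installed:

```python
# Both import styles now reach the same functions re-exported from
# paddleslim/dist/__init__.py.
import paddleslim.dist as dist
from paddleslim.dist import merge, fsp_loss, l2_loss, soft_label_loss, loss

assert dist.merge is merge
assert dist.fsp_loss is fsp_loss and dist.loss is loss
```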
223 changes: 0 additions & 223 deletions paddleslim/dist/mp_distiller.py

This file was deleted.
