File "/root/.cache/huggingface/modules/transformers_modules/chatglm2-6b/modeling_chatglm.py", line 1191, in quantize self.transformer.encoder = quantize(self.transformer.encoder, bits, empty_init=empty_init, device=device, File "/root/.cache/huggingface/modules/transformers_modules/chatglm2-6b/qu...
        name=self._name)
    partition_axes = self._partition_axes()
    if len(partition_axes) > 1:
      raise NotImplementedError(
          "Cannot concatenate along more than one dimension: %s. "
          "Multi-axis partition concat is not supported" % str(partition_axes))
    partition_ix = partition_axes[0]
    with ops.name_...
[20:28:14] [main/INFO]: Injecting required class transformer 'com.mumfrey.liteloader.client.transformers.CrashReportTransformer'
[20:28:14] [main/INFO]: Queuing required class transformer 'com.mumfrey.liteloader.common.transformers.LiteLoaderPacketTransformer'
[20:28:14] [main/INFO]: Queuing req...