from math import ceil


def assert_device_map(device_map, num_blocks):
    blocks = list(range(0, num_blocks))

    device_map_blocks = [item for sublist in list(device_map.values()) for item in sublist]

    # Duplicate check
    duplicate_blocks = []
    for i in device_map_blocks:
        if device_map_blocks.count(i) > 1 and i not in duplicate_blocks:
            duplicate_blocks.append(i)
    # Missing blocks
    missing_blocks = [i for i in blocks if i not in device_map_blocks]
    extra_blocks = [i for i in device_map_blocks if i not in blocks]

    if len(duplicate_blocks) != 0:
        raise ValueError(
            "Duplicate attention blocks specified in device_map. Attention blocks must be specified to one device."
            " These attention blocks were specified more than once: " + str(duplicate_blocks)
        )
    if len(missing_blocks) != 0:
        raise ValueError(
            "There are attention blocks for this model that are not specified in the device_map. Add these attention "
            "blocks to a device on the device_map: " + str(missing_blocks)
        )
    if len(extra_blocks) != 0:
        raise ValueError(
            "The device_map contains more attention blocks than this model has. Remove these from the device_map:"
            + str(extra_blocks)
        )


def get_device_map(n_layers, devices):
    """Returns a dictionary of layers distributed evenly across all devices."""
    layers = list(range(n_layers))
    n_blocks = int(ceil(n_layers / len(devices)))
    layers_list = [layers[i : i + n_blocks] for i in range(0, n_layers, n_blocks)]

    return dict(zip(devices, layers_list))