diff --git a/adit/mass_transfer/static/mass_transfer/vendor/codemirror/mode/javascript/javascript.min.js b/adit/mass_transfer/static/mass_transfer/vendor/codemirror/mode/javascript/javascript.min.js
new file mode 100644
index 000000000..c20781232
--- /dev/null
+++ b/adit/mass_transfer/static/mass_transfer/vendor/codemirror/mode/javascript/javascript.min.js
@@ -0,0 +1 @@
+!function(e){"object"==typeof exports&&"object"==typeof module?e(require("../../lib/codemirror")):"function"==typeof define&&define.amd?define(["../../lib/codemirror"],e):e(CodeMirror)}(function(rt){"use strict";rt.defineMode("javascript",function(e,l){var t,r,O,P,f=e.indentUnit,N=l.statementIndent,U=l.jsonld,o=l.json||U,W=!1!==l.trackScope,u=l.typescript,B=l.wordCharacters||/[\w$\xa1-\uffff]/,F=(e=n("keyword a"),t=n("keyword b"),r=n("keyword c"),O=n("keyword d"),P=n("operator"),{if:n("if"),while:e,with:e,else:t,do:t,try:t,finally:t,return:O,break:O,continue:O,new:n("new"),delete:r,void:r,throw:r,debugger:n("debugger"),var:n("var"),const:n("var"),let:n("var"),function:n("function"),catch:n("catch"),for:n("for"),switch:n("switch"),case:n("case"),default:n("default"),in:P,typeof:P,instanceof:P,true:e={type:"atom",style:"atom"},false:e,null:e,undefined:e,NaN:e,Infinity:e,this:n("this"),class:n("class"),super:n("atom"),yield:r,export:n("export"),import:n("import"),extends:r,await:r});function n(e){return{type:e,style:"keyword"}}var H,D,G=/[+\-*&%=<>!?|~^@]/,J=/^@(context|id|value|language|type|container|list|set|reverse|index|base|vocab|graph)"/;function i(e,t,r){return H=e,D=r,t}function d(e,t){var a,r=e.next();if('"'==r||"'"==r)return t.tokenize=(a=r,function(e,t){var r,n=!1;if(U&&"@"==e.peek()&&e.match(J))return t.tokenize=d,i("jsonld-keyword","meta");for(;null!=(r=e.next())&&(r!=a||n);)n=!n&&"\\"==r;return n||(t.tokenize=d),i("string","string")}),t.tokenize(e,t);if("."==r&&e.match(/^\d[\d_]*(?:[eE][+\-]?[\d_]+)?/))return i("number","number");if("."==r&&e.match(".."))return i("spread","meta");if(/[\[\]{}\(\),;\:\.]/.test(r))return i(r);if("="==r&&e.eat(">"))return i("=>","operator");if("0"==r&&e.match(/^(?:x[\dA-Fa-f_]+|o[0-7_]+|b[01_]+)n?/))return i("number","number");if(/\d/.test(r))return e.match(/^[\d_]*(?:n|(?:\.[\d_]*)?(?:[eE][+\-]?[\d_]+)?)?/),i("number","number");if("/"==r)return e.eat("*")?(t.tokenize=K)(e,t):e.eat("/")?(e.skipToEnd(),i("comment","comment")):tt(e,t,1)?(function(e){for(var t,r=!1,n=!1;null!=(t=e.next());){if(!r){if("/"==t&&!n)return;"["==t?n=!0:n&&"]"==t&&(n=!1)}r=!r&&"\\"==t}}(e),e.match(/^\b(([gimyus])(?![gimyus]*\2))+\b/),i("regexp","string-2")):(e.eat("="),i("operator","operator",e.current()));if("`"==r)return(t.tokenize=L)(e,t);if("#"==r&&"!"==e.peek())return e.skipToEnd(),i("meta","meta");if("#"==r&&e.eatWhile(B))return i("variable","property");if("<"==r&&e.match("!--")||"-"==r&&e.match("->")&&!/\S/.test(e.string.slice(0,e.start)))return e.skipToEnd(),i("comment","comment");if(G.test(r))return">"==r&&t.lexical&&">"==t.lexical.type||(e.eat("=")?"!"!=r&&"="!=r||e.eat("="):/[<>*+\-|&?]/.test(r)&&(e.eat(r),">"==r&&e.eat(r))),"?"==r&&e.eat(".")?i("."):i("operator","operator",e.current());if(B.test(r)){e.eatWhile(B);r=e.current();if("."!=t.lastType){if(F.propertyIsEnumerable(r))return i((t=F[r]).type,t.style,r);if("async"==r&&e.match(/^(\s|\/\*([^*]|\*(?!\/))*?\*\/)*[\[\(\w]/,!1))return i("async","keyword",r)}return i("variable","variable",r)}}function K(e,t){for(var r,n=!1;r=e.next();){if("/"==r&&n){t.tokenize=d;break}n="*"==r}return i("comment","comment")}function L(e,t){for(var r,n=!1;null!=(r=e.next());){if(!n&&("`"==r||"$"==r&&e.eat("{"))){t.tokenize=d;break}n=!n&&"\\"==r}return i("quasi","string-2",e.current())}function Q(e,t){t.fatArrowAt&&(t.fatArrowAt=null);var r=e.string.indexOf("=>",e.start);if(!(r<0)){!u||(n=/:\s*(?:\w+(?:<[^>]*>|\[\])?|\{[^}]*\})\s*$/.exec(e.string.slice(e.start,r)))&&(r=n.index);for(var n,a=0,i=!1,o=r-1;0<=o;--o){var 
c=e.string.charAt(o),s="([{}])".indexOf(c);if(0<=s&&s<3){if(!a){++o;break}if(0==--a){"("==c&&(i=!0);break}}else if(3<=s&&s<6)++a;else if(B.test(c))i=!0;else if(/["'\/`]/.test(c))for(;;--o){if(0==o)return;if(e.string.charAt(o-1)==c&&"\\"!=e.string.charAt(o-2)){o--;break}}else if(i&&!a){++o;break}}i&&!a&&(t.fatArrowAt=o)}}var R={atom:!0,number:!0,variable:!0,string:!0,regexp:!0,this:!0,import:!0,"jsonld-keyword":!0};function X(e,t,r,n,a,i){this.indented=e,this.column=t,this.type=r,this.prev=a,this.info=i,null!=n&&(this.align=n)}function Y(e,t,r,n,a){var i=e.cc;for(c.state=e,c.stream=a,c.marked=null,c.cc=i,c.style=t,e.lexical.hasOwnProperty("align")||(e.lexical.align=!0);;)if((i.length?i.pop():o?x:b)(r,n)){for(;i.length&&i[i.length-1].lex;)i.pop()();return c.marked?c.marked:"variable"==r&&function(e,t){if(W){for(var r=e.localVars;r;r=r.next)if(r.name==t)return 1;for(var n=e.context;n;n=n.prev)for(r=n.vars;r;r=r.next)if(r.name==t)return 1}}(e,n)?"variable-2":t}}var c={state:null,column:null,marked:null,cc:null};function s(){for(var e=arguments.length-1;0<=e;e--)c.cc.push(arguments[e])}function p(){return s.apply(null,arguments),!0}function Z(e,t){for(var r=t;r;r=r.next)if(r.name==e)return 1}function a(e){var t=c.state;if(c.marked="def",W){if(t.context)if("var"==t.lexical.info&&t.context&&t.context.block){var r=function e(t,r){{var n;return r?r.block?(n=e(t,r.prev))?n==r.prev?r:new te(n,r.vars,!0):null:Z(t,r.vars)?r:new te(r.prev,new re(t,r.vars),!1):null}}(e,t.context);if(null!=r)return void(t.context=r)}else if(!Z(e,t.localVars))return void(t.localVars=new re(e,t.localVars));l.globalVars&&!Z(e,t.globalVars)&&(t.globalVars=new re(e,t.globalVars))}}function ee(e){return"public"==e||"private"==e||"protected"==e||"abstract"==e||"readonly"==e}function te(e,t,r){this.prev=e,this.vars=t,this.block=r}function re(e,t){this.name=e,this.next=t}var ne=new re("this",new re("arguments",null));function m(){c.state.context=new te(c.state.context,c.state.localVars,!1),c.state.localVars=ne}function ae(){c.state.context=new te(c.state.context,c.state.localVars,!0),c.state.localVars=null}function k(){c.state.localVars=c.state.context.vars,c.state.context=c.state.context.prev}function v(n,a){function e(){var e=c.state,t=e.indented;if("stat"==e.lexical.type)t=e.lexical.indented;else for(var r=e.lexical;r&&")"==r.type&&r.align;r=r.prev)t=r.indented;e.lexical=new X(t,c.stream.column(),n,null,e.lexical,a)}return e.lex=!0,e}function y(){var e=c.state;e.lexical.prev&&(")"==e.lexical.type&&(e.indented=e.lexical.indented),e.lexical=e.lexical.prev)}function w(r){return function e(t){return t==r?p():";"==r||"}"==t||")"==t||"]"==t?s():p(e)}}function b(e,t){return"var"==e?p(v("vardef",t),qe,w(";"),y):"keyword a"==e?p(v("form"),oe,b,y):"keyword b"==e?p(v("form"),b,y):"keyword 
d"==e?c.stream.match(/^\s*$/,!1)?p():p(v("stat"),g,w(";"),y):"debugger"==e?p(w(";")):"{"==e?p(v("}"),ae,be,y,k):";"==e?p():"if"==e?("else"==c.state.lexical.info&&c.state.cc[c.state.cc.length-1]==y&&c.state.cc.pop()(),p(v("form"),oe,b,y,Oe)):"function"==e?p(q):"for"==e?p(v("form"),ae,Pe,b,k,y):"class"==e||u&&"interface"==t?(c.marked="keyword",p(v("form","class"==e?e:t),Fe,y)):"variable"==e?u&&"declare"==t?(c.marked="keyword",p(b)):u&&("module"==t||"enum"==t||"type"==t)&&c.stream.match(/^\s*\w/,!1)?(c.marked="keyword","enum"==t?p(Ze):"type"==t?p(We,w("operator"),z,w(";")):p(v("form"),T,w("{"),v("}"),be,y,y)):u&&"namespace"==t?(c.marked="keyword",p(v("form"),x,b,y)):u&&"abstract"==t?(c.marked="keyword",p(b)):p(v("stat"),me):"switch"==e?p(v("form"),oe,w("{"),v("}","switch"),ae,be,y,y,k):"case"==e?p(x,w(":")):"default"==e?p(w(":")):"catch"==e?p(v("form"),m,ie,b,y,k):"export"==e?p(v("stat"),Ge,y):"import"==e?p(v("stat"),Ke,y):"async"==e?p(b):"@"==t?p(x,b):s(v("stat"),x,w(";"),y)}function ie(e){if("("==e)return p(S,w(")"))}function x(e,t){return ce(e,t,!1)}function h(e,t){return ce(e,t,!0)}function oe(e){return"("!=e?s():p(v(")"),g,w(")"),y)}function ce(e,t,r){if(c.state.fatArrowAt==c.stream.start){var n=r?fe:le;if("("==e)return p(m,v(")"),V(S,")"),y,w("=>"),n,k);if("variable"==e)return s(m,T,w("=>"),n,k)}var a,n=r?M:j;return R.hasOwnProperty(e)?p(n):"function"==e?p(q,n):"class"==e||u&&"interface"==t?(c.marked="keyword",p(v("form"),Be,y)):"keyword c"==e||"async"==e?p(r?h:x):"("==e?p(v(")"),g,w(")"),y,n):"operator"==e||"spread"==e?p(r?h:x):"["==e?p(v("]"),Ye,y,n):"{"==e?we(ve,"}",null,n):"quasi"==e?s(se,n):"new"==e?p((a=r,function(e){return"."==e?p(a?pe:de):"variable"==e&&u?p(Ie,a?M:j):s(a?h:x)})):p()}function g(e){return e.match(/[;\}\)\],]/)?s():s(x)}function j(e,t){return","==e?p(g):M(e,t,!1)}function M(e,t,r){var n=0==r?j:M,a=0==r?x:h;return"=>"==e?p(m,r?fe:le,k):"operator"==e?/\+\+|--/.test(t)||u&&"!"==t?p(n):u&&"<"==t&&c.stream.match(/^([^<>]|<[^<>]*>)*>\s*\(/,!1)?p(v(">"),V(z,">"),y,n):"?"==t?p(x,w(":"),a):p(a):"quasi"==e?s(se,n):";"!=e?"("==e?we(h,")","call",n):"."==e?p(ke,n):"["==e?p(v("]"),g,w("]"),y,n):u&&"as"==t?(c.marked="keyword",p(z,n)):"regexp"==e?(c.state.lastType=c.marked="operator",c.stream.backUp(c.stream.pos-c.stream.start-1),p(a)):void 0:void 0}function se(e,t){return"quasi"!=e?s():"${"!=t.slice(t.length-2)?p(se):p(g,ue)}function ue(e){if("}"==e)return c.marked="string-2",c.state.tokenize=L,p(se)}function le(e){return Q(c.stream,c.state),s("{"==e?b:x)}function fe(e){return Q(c.stream,c.state),s("{"==e?b:h)}function de(e,t){if("target"==t)return c.marked="keyword",p(j)}function pe(e,t){if("target"==t)return c.marked="keyword",p(M)}function me(e){return":"==e?p(y,b):s(j,w(";"),y)}function ke(e){if("variable"==e)return c.marked="property",p()}function ve(e,t){return"async"==e?(c.marked="property",p(ve)):"variable"!=e&&"keyword"!=c.style?"number"==e||"string"==e?(c.marked=U?"property":c.style+" property",p(A)):"jsonld-keyword"==e?p(A):u&&ee(t)?(c.marked="keyword",p(ve)):"["==e?p(x,E,w("]"),A):"spread"==e?p(h,A):"*"==t?(c.marked="keyword",p(ve)):":"==e?s(A):void 0:(c.marked="property","get"==t||"set"==t?p(ye):(u&&c.state.fatArrowAt==c.stream.start&&(e=c.stream.match(/^\s*:\s*/,!1))&&(c.state.fatArrowAt=c.stream.pos+e[0].length),p(A)))}function ye(e){return"variable"!=e?s(A):(c.marked="property",p(q))}function A(e){return":"==e?p(h):"("==e?s(q):void 0}function V(n,a,i){function o(e,t){var r;return(i?-1"),z):"quasi"==e?s(Ve,I):void 0}function je(e){if("=>"==e)return p(z)}function 
Me(e){return e.match(/[\}\)\]]/)?p():","==e||";"==e?p(Me):s(Ae,Me)}function Ae(e,t){return"variable"==e||"keyword"==c.style?(c.marked="property",p(Ae)):"?"==t||"number"==e||"string"==e?p(Ae):":"==e?p(z):"["==e?p(w("variable"),xe,w("]"),Ae):"("==e?s(C,Ae):e.match(/[;\}\)\],]/)?void 0:p()}function Ve(e,t){return"quasi"!=e?s():"${"!=t.slice(t.length-2)?p(Ve):p(z,Ee)}function Ee(e){if("}"==e)return c.marked="string-2",c.state.tokenize=L,p(Ve)}function ze(e,t){return"variable"==e&&c.stream.match(/^\s*[?:]/,!1)||"?"==t?p(ze):":"==e?p(z):"spread"==e?p(ze):s(z)}function I(e,t){return"<"==t?p(v(">"),V(z,">"),y,I):"|"==t||"."==e||"&"==t?p(z):"["==e?p(z,w("]"),I):"extends"==t||"implements"==t?(c.marked="keyword",p(z)):"?"==t?p(z,w(":"),z):void 0}function Ie(e,t){if("<"==t)return p(v(">"),V(z,">"),y,I)}function Te(){return s(z,$e)}function $e(e,t){if("="==t)return p(z)}function qe(e,t){return"enum"==t?(c.marked="keyword",p(Ze)):s(T,E,$,_e)}function T(e,t){return u&&ee(t)?(c.marked="keyword",p(T)):"variable"==e?(a(t),p()):"spread"==e?p(T):"["==e?we(Se,"]"):"{"==e?we(Ce,"}"):void 0}function Ce(e,t){return"variable"!=e||c.stream.match(/^\s*:/,!1)?("variable"==e&&(c.marked="property"),"spread"==e?p(T):"}"==e?s():"["==e?p(x,w("]"),w(":"),Ce):p(w(":"),T,$)):(a(t),p($))}function Se(){return s(T,$)}function $(e,t){if("="==t)return p(h)}function _e(e){if(","==e)return p(qe)}function Oe(e,t){if("keyword b"==e&&"else"==t)return p(v("form","else"),b,y)}function Pe(e,t){return"await"==t?p(Pe):"("==e?p(v(")"),Ne,y):void 0}function Ne(e){return"var"==e?p(qe,Ue):("variable"==e?p:s)(Ue)}function Ue(e,t){return")"==e?p():";"==e?p(Ue):"in"==t||"of"==t?(c.marked="keyword",p(x,Ue)):s(x,Ue)}function q(e,t){return"*"==t?(c.marked="keyword",p(q)):"variable"==e?(a(t),p(q)):"("==e?p(m,v(")"),V(S,")"),y,he,b,k):u&&"<"==t?p(v(">"),V(Te,">"),y,q):void 0}function C(e,t){return"*"==t?(c.marked="keyword",p(C)):"variable"==e?(a(t),p(C)):"("==e?p(m,v(")"),V(S,")"),y,he,k):u&&"<"==t?p(v(">"),V(Te,">"),y,C):void 0}function We(e,t){return"keyword"==e||"variable"==e?(c.marked="type",p(We)):"<"==t?p(v(">"),V(Te,">"),y):void 0}function S(e,t){return"@"==t&&p(x,S),"spread"==e?p(S):u&&ee(t)?(c.marked="keyword",p(S)):u&&"this"==e?p(E,$):s(T,E,$)}function Be(e,t){return("variable"==e?Fe:He)(e,t)}function Fe(e,t){if("variable"==e)return a(t),p(He)}function He(e,t){return"<"==t?p(v(">"),V(Te,">"),y,He):"extends"==t||"implements"==t||u&&","==e?("implements"==t&&(c.marked="keyword"),p(u?z:x,He)):"{"==e?p(v("}"),_,y):void 0}function _(e,t){return"async"==e||"variable"==e&&("static"==t||"get"==t||"set"==t||u&&ee(t))&&c.stream.match(/^\s+#?[\w$\xa1-\uffff]/,!1)?(c.marked="keyword",p(_)):"variable"==e||"keyword"==c.style?(c.marked="property",p(De,_)):"number"==e||"string"==e?p(De,_):"["==e?p(x,E,w("]"),De,_):"*"==t?(c.marked="keyword",p(_)):u&&"("==e?s(C,_):";"==e||","==e?p(_):"}"==e?p():"@"==t?p(x,_):void 0}function De(e,t){if("!"==t)return p(De);if("?"==t)return p(De);if(":"==e)return p(z,$);if("="==t)return p(h);e=c.state.lexical.prev;return s(e&&"interface"==e.info?C:q)}function Ge(e,t){return"*"==t?(c.marked="keyword",p(Xe,w(";"))):"default"==t?(c.marked="keyword",p(x,w(";"))):"{"==e?p(V(Je,"}"),Xe,w(";")):s(b)}function Je(e,t){return"as"==t?(c.marked="keyword",p(w("variable"))):"variable"==e?s(h,Je):void 0}function Ke(e){return"string"==e?p():"("==e?s(x):"."==e?s(j):s(Le,Qe,Xe)}function Le(e,t){return"{"==e?we(Le,"}"):("variable"==e&&a(t),"*"==t&&(c.marked="keyword"),p(Re))}function Qe(e){if(","==e)return p(Le,Qe)}function 
Re(e,t){if("as"==t)return c.marked="keyword",p(Le)}function Xe(e,t){if("from"==t)return c.marked="keyword",p(x)}function Ye(e){return"]"==e?p():s(V(h,"]"))}function Ze(){return s(v("form"),T,w("{"),v("}"),V(et,"}"),y,y)}function et(){return s(T,$)}function tt(e,t,r){return t.tokenize==d&&/^(?:operator|sof|keyword [bcd]|case|new|export|default|spread|[\[{}\(,;:]|=>)$/.test(t.lastType)||"quasi"==t.lastType&&/\{\s*$/.test(e.string.slice(0,e.pos-(r||0)))}return m.lex=ae.lex=!0,y.lex=k.lex=!0,{startState:function(e){e={tokenize:d,lastType:"sof",cc:[],lexical:new X((e||0)-f,0,"block",!1),localVars:l.localVars,context:l.localVars&&new te(null,null,!1),indented:e||0};return l.globalVars&&"object"==typeof l.globalVars&&(e.globalVars=l.globalVars),e},token:function(e,t){if(e.sol()&&(t.lexical.hasOwnProperty("align")||(t.lexical.align=!1),t.indented=e.indentation(),Q(e,t)),t.tokenize!=K&&e.eatSpace())return null;var r=t.tokenize(e,t);return"comment"==H?r:(t.lastType="operator"!=H||"++"!=D&&"--"!=D?H:"incdec",Y(t,r,H,D,e))},indent:function(e,t){if(e.tokenize==K||e.tokenize==L)return rt.Pass;if(e.tokenize!=d)return 0;var r,n=t&&t.charAt(0),a=e.lexical;if(!/^\s*else\b/.test(t))for(var i=e.cc.length-1;0<=i;--i){var o=e.cc[i];if(o==y)a=a.prev;else if(o!=Oe&&o!=k)break}for(;("stat"==a.type||"form"==a.type)&&("}"==n||(r=e.cc[e.cc.length-1])&&(r==j||r==M)&&!/^[,\.=+\-*:?[\(]/.test(t));)a=a.prev;var c,s=(a=N&&")"==a.type&&"stat"==a.prev.type?a.prev:a).type,u=n==s;return"vardef"==s?a.indented+("operator"==e.lastType||","==e.lastType?a.info.length+1:0):"form"==s&&"{"==n?a.indented:"form"==s?a.indented+f:"stat"==s?a.indented+(s=t,"operator"==(c=e).lastType||","==c.lastType||G.test(s.charAt(0))||/[,.]/.test(s.charAt(0))?N||f:0):"switch"!=a.info||u||0==l.doubleIndentSwitch?a.align?a.column+(u?0:1):a.indented+(u?0:f):a.indented+(/^(?:case|default)\b/.test(t)?f:2*f)},electricInput:/^\s*(?:case .*?:|default:|\{|\})$/,blockCommentStart:o?null:"/*",blockCommentEnd:o?null:"*/",blockCommentContinue:o?null:" * ",lineComment:o?null:"//",fold:"brace",closeBrackets:"()[]{}''\"\"``",helperType:o?"json":"javascript",jsonldMode:U,jsonMode:o,expressionAllowed:tt,skipExpression:function(e){Y(e,"atom","atom","true",new rt.StringStream("",2,null))}}}),rt.registerHelper("wordChars","javascript",/[\w$]/),rt.defineMIME("text/javascript","javascript"),rt.defineMIME("text/ecmascript","javascript"),rt.defineMIME("application/javascript","javascript"),rt.defineMIME("application/x-javascript","javascript"),rt.defineMIME("application/ecmascript","javascript"),rt.defineMIME("application/json",{name:"javascript",json:!0}),rt.defineMIME("application/x-json",{name:"javascript",json:!0}),rt.defineMIME("application/manifest+json",{name:"javascript",json:!0}),rt.defineMIME("application/ld+json",{name:"javascript",jsonld:!0}),rt.defineMIME("text/typescript",{name:"javascript",typescript:!0}),rt.defineMIME("application/typescript",{name:"javascript",typescript:!0})});
\ No newline at end of file
diff --git a/adit/mass_transfer/tables.py b/adit/mass_transfer/tables.py
new file mode 100644
index 000000000..198bc608e
--- /dev/null
+++ b/adit/mass_transfer/tables.py
@@ -0,0 +1,64 @@
+import django_tables2 as tables
+from django.utils.html import format_html
+
+from adit.core.tables import DicomTaskTable, TransferJobTable
+
+from .models import MassTransferJob, MassTransferTask, MassTransferVolume
+from .templatetags.mass_transfer_extras import volume_status_css_class
+
+
+class MassTransferJobTable(TransferJobTable):
+ class Meta(TransferJobTable.Meta):
+ model = MassTransferJob
+
+
+class MassTransferTaskTable(DicomTaskTable):
+ class Meta(DicomTaskTable.Meta):
+ model = MassTransferTask
+
+
+class MassTransferVolumeTable(tables.Table):
+ status = tables.Column(verbose_name="Status")
+ study_info = tables.Column(verbose_name="Study Info", empty_values=(), orderable=False)
+ modality = tables.Column(verbose_name="Modality")
+ series_number = tables.Column(verbose_name="Series #")
+ series_description = tables.Column(verbose_name="Series Description")
+ institution_name = tables.Column(verbose_name="Institution")
+ number_of_images = tables.Column(verbose_name="# Images")
+ log = tables.Column(verbose_name="Reason", attrs={"td": {"class": "small"}})
+
+ class Meta:
+ model = MassTransferVolume
+ fields = (
+ "status",
+ "study_info",
+ "modality",
+ "series_number",
+ "series_description",
+ "institution_name",
+ "number_of_images",
+ "log",
+ )
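+        # "study_datetime" is a model field: it drives the default ordering below
+        # but is rendered inside the study_info column (see render_study_info).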
+ order_by = ("status", "study_datetime")
+ empty_text = "No volumes to show"
+ attrs = {"class": "table table-bordered table-hover table-sm"}
+
+ def render_status(self, value, record):
+ css_class = volume_status_css_class(record.status)
+ return format_html(
+            '<span class="{}">{}</span>', css_class, record.get_status_display()
+ )
+
+ def render_study_info(self, record):
+ desc = record.study_description or "—"
+ dt = record.study_datetime.strftime("%Y-%m-%d") if record.study_datetime else ""
+        return format_html("{}<br />{}", desc, dt)
+
+ def render_series_number(self, value):
+ return value if value is not None else "—"
+
+ def render_series_description(self, value):
+ return value or "—"
+
+ def render_log(self, value):
+ return value or "—"
diff --git a/adit/mass_transfer/tasks.py b/adit/mass_transfer/tasks.py
new file mode 100644
index 000000000..cb59fa8db
--- /dev/null
+++ b/adit/mass_transfer/tasks.py
@@ -0,0 +1,68 @@
+import logging
+
+from django import db
+from procrastinate import JobContext, RetryStrategy
+from procrastinate.contrib.django import app
+
+from adit.core.models import DicomJob, DicomTask
+from adit.core.tasks import DICOM_TASK_RETRY_STRATEGY, _run_dicom_task
+
+logger = logging.getLogger(__name__)
+
+
+# Separate task function for mass transfer on a dedicated queue so it does not
+# starve batch/selective transfers. Mass transfer tasks process an entire
+# partition (discovery + export + convert) and can run for hours, so the
+# pebble process timeout is set to 24 hours as a safety net. Individual DICOM
+# operations are still protected by Stamina / pynetdicom-level timeouts.
+MASS_TRANSFER_PROCESS_TIMEOUT = 24 * 60 * 60 # 24 hours
+
+
+@app.task(
+ queue="mass_transfer",
+ pass_context=True,
+ retry=DICOM_TASK_RETRY_STRATEGY,
+)
+def process_mass_transfer_task(context: JobContext, model_label: str, task_id: int):
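+    """Run one mass transfer DICOM task (an entire partition) under the extended timeout."""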
+ _run_dicom_task(context, model_label, task_id, process_timeout=MASS_TRANSFER_PROCESS_TIMEOUT)
+
+
+@app.task(queue="default", retry=RetryStrategy(max_attempts=3, wait=10))
+def queue_mass_transfer_tasks(job_id: int):
+ """Queues all pending tasks for a mass transfer job.
+
+ Runs on the default worker so that the HTTP view returns immediately
+ instead of blocking on thousands of individual defer() calls.
+ """
+ from .models import MassTransferJob
+
+ try:
+ job = MassTransferJob.objects.get(pk=job_id)
+ except MassTransferJob.DoesNotExist:
+ logger.info("MassTransferJob %d no longer exists; skipping queue.", job_id)
+ return
+
+ if job.status != DicomJob.Status.PENDING:
+ logger.warning(
+ "MassTransferJob %d has status %s (expected PENDING); skipping queue.",
+ job_id,
+ job.status,
+ )
+ return
+
+ try:
+ for mass_task in job.tasks.filter(
+ status=DicomTask.Status.PENDING,
+ queued_job__isnull=True, # Skip tasks already queued (idempotency guard)
+ ):
+ try:
+ mass_task.queue_pending_task()
+ except Exception:
+ logger.exception(
+ "Failed to queue MassTransferTask %d for job %d",
+ mass_task.pk,
+ job_id,
+ )
+ raise
+ finally:
+ db.close_old_connections()
diff --git a/adit/mass_transfer/templates/mass_transfer/mass_transfer_job_detail.html b/adit/mass_transfer/templates/mass_transfer/mass_transfer_job_detail.html
new file mode 100644
index 000000000..d233459ca
--- /dev/null
+++ b/adit/mass_transfer/templates/mass_transfer/mass_transfer_job_detail.html
@@ -0,0 +1,76 @@
+{% extends "mass_transfer/mass_transfer_layout.html" %}
+{% load crispy from crispy_forms_tags %}
+{% load render_table from django_tables2 %}
+{% load bootstrap_icon from common_extras %}
+{% load dicom_job_status_css_class from core_extras %}
+{% load job_control_panel from mass_transfer_extras %}
+{% block title %}
+ Mass Transfer Job
+{% endblock title %}
+{% block heading %}
+  <div class="d-flex justify-content-end">
+    <div class="btn-group">
+      <a href="{% url 'mass_transfer_job_export' job.pk %}" class="btn btn-sm btn-secondary">
+        {% bootstrap_icon "download" %}
+        Export CSV
+      </a>
+      <a href="{% url 'mass_transfer_job_list' %}" class="btn btn-sm btn-secondary">
+        {% bootstrap_icon "list" %}
+        Job List
+      </a>
+    </div>
+  </div>
+{% endblock heading %}
+{% block content %}
+  <dl class="row">
+    <dt class="col-sm-3">Job ID</dt>
+    <dd class="col-sm-9">{{ job.id }}</dd>
+    <dt class="col-sm-3">Created At</dt>
+    <dd class="col-sm-9">{{ job.created }}</dd>
+    {% if user.is_staff %}
+      <dt class="col-sm-3">Created By</dt>
+      <dd class="col-sm-9">{{ job.owner }}</dd>
+    {% endif %}
+    <dt class="col-sm-3">Source</dt>
+    <dd class="col-sm-9">{{ job.tasks.first.source }}</dd>
+    <dt class="col-sm-3">Destination</dt>
+    <dd class="col-sm-9">{{ job.tasks.first.destination }}</dd>
+    <dt class="col-sm-3">Start Date</dt>
+    <dd class="col-sm-9">{{ job.start_date }}</dd>
+    <dt class="col-sm-3">End Date</dt>
+    <dd class="col-sm-9">{{ job.end_date }}</dd>
+    <dt class="col-sm-3">Granularity</dt>
+    <dd class="col-sm-9">{{ job.get_partition_granularity_display }}</dd>
+    <dt class="col-sm-3">Pseudonymization</dt>
+    <dd class="col-sm-9">{% if job.pseudonymize %}Enabled{% if job.pseudonym_salt %} (linked){% else %} (random, no linking){% endif %}{% else %}Disabled{% endif %}</dd>
+    {% if job.pseudonym_salt %}
+      <dt class="col-sm-3">Pseudonym Salt</dt>
+      <dd class="col-sm-9">{{ job.pseudonym_salt }}</dd>
+    {% endif %}
+    <dt class="col-sm-3">Filters</dt>
+    <dd class="col-sm-9">
+      {% if job.filters_json %}
+        <pre class="mb-0">{{ job.filters_json_pretty }}</pre>
+      {% else %}
+        —
+      {% endif %}
+    </dd>
+    <dt class="col-sm-3">Processed Tasks</dt>
+    <dd class="col-sm-9">{{ job.processed_tasks.count }} of {{ job.tasks.count }}</dd>
+    <dt class="col-sm-3">Status</dt>
+    <dd class="col-sm-9">
+      <span class="{{ job.status|dicom_job_status_css_class }}">
+        {{ job.get_status_display }}
+      </span>
+    </dd>
+    <dt class="col-sm-3">Message</dt>
+    <dd class="col-sm-9">{{ job.message|default:"—" }}</dd>
+  </dl>
+  <div class="mt-3">
+    {% crispy filter.form %}
+  </div>
+ {% render_table table %}
+ {% job_control_panel %}
+{% endblock content %}
diff --git a/adit/mass_transfer/templates/mass_transfer/mass_transfer_job_form.html b/adit/mass_transfer/templates/mass_transfer/mass_transfer_job_form.html
new file mode 100644
index 000000000..4e218886d
--- /dev/null
+++ b/adit/mass_transfer/templates/mass_transfer/mass_transfer_job_form.html
@@ -0,0 +1,77 @@
+{% extends "mass_transfer/mass_transfer_layout.html" %}
+{% load crispy from crispy_forms_tags %}
+{% load bootstrap_icon from common_extras %}
+{% block title %}
+ New Mass Transfer Job
+{% endblock title %}
+{% block css %}
+ {{ block.super }}
+ {{ form.media.css }}
+
+{% endblock css %}
+{% block heading %}
+  <div class="d-flex justify-content-end">
+    <div class="btn-group">
+      <a href="{% url 'mass_transfer_job_list' %}" class="btn btn-sm btn-secondary">
+        {% bootstrap_icon "list" %}
+        Previous Jobs
+      </a>
+    </div>
+  </div>
+{% endblock heading %}
+{% block content %}
+ {% crispy form %}
+{% endblock content %}
+{% block script %}
+ {{ block.super }}
+ {{ form.media.js }}
+{% endblock script %}
diff --git a/adit/mass_transfer/templates/mass_transfer/mass_transfer_job_list.html b/adit/mass_transfer/templates/mass_transfer/mass_transfer_job_list.html
new file mode 100644
index 000000000..897209fee
--- /dev/null
+++ b/adit/mass_transfer/templates/mass_transfer/mass_transfer_job_list.html
@@ -0,0 +1,18 @@
+{% extends "mass_transfer/mass_transfer_layout.html" %}
+{% load bootstrap_icon from common_extras %}
+{% block title %}
+ Mass Transfer Jobs
+{% endblock title %}
+{% block heading %}
+  <div class="d-flex justify-content-end">
+    <div class="btn-group">
+      <a href="{% url 'mass_transfer_job_create' %}" class="btn btn-sm btn-primary">
+        {% bootstrap_icon "plus-lg" %}
+        Create New Job
+      </a>
+    </div>
+  </div>
+{% endblock heading %}
+{% block content %}
+ {% include "core/_dicom_job_table.html" %}
+{% endblock content %}
diff --git a/adit/mass_transfer/templates/mass_transfer/mass_transfer_layout.html b/adit/mass_transfer/templates/mass_transfer/mass_transfer_layout.html
new file mode 100644
index 000000000..ed8d0ed76
--- /dev/null
+++ b/adit/mass_transfer/templates/mass_transfer/mass_transfer_layout.html
@@ -0,0 +1,12 @@
+{% extends "core/core_layout.html" %}
+{% load static from static %}
+{% block css %}
+ {{ block.super }}
+  <link rel="stylesheet" href="{% static 'mass_transfer/vendor/codemirror/lib/codemirror.css' %}" />
+{% endblock css %}
+{% block script %}
+ {{ block.super }}
+  <script src="{% static 'mass_transfer/vendor/codemirror/lib/codemirror.min.js' %}"></script>
+  <script src="{% static 'mass_transfer/vendor/codemirror/mode/javascript/javascript.min.js' %}"></script>
+{% endblock script %}
diff --git a/adit/mass_transfer/templates/mass_transfer/mass_transfer_task_detail.html b/adit/mass_transfer/templates/mass_transfer/mass_transfer_task_detail.html
new file mode 100644
index 000000000..ed587f814
--- /dev/null
+++ b/adit/mass_transfer/templates/mass_transfer/mass_transfer_task_detail.html
@@ -0,0 +1,46 @@
+{% extends "mass_transfer/mass_transfer_layout.html" %}
+{% load dicom_task_status_css_class from core_extras %}
+{% load task_control_panel from mass_transfer_extras %}
+{% load render_table from django_tables2 %}
+{% load crispy from crispy_forms_tags %}
+{% load bootstrap_icon from common_extras %}
+{% block title %}
+ Mass Transfer Task
+{% endblock title %}
+{% block heading %}
+  <div class="d-flex justify-content-end">
+    <div class="btn-group">
+      <a href="{{ task.job.get_absolute_url }}" class="btn btn-sm btn-secondary">
+        {% bootstrap_icon "arrow-left" %}
+        Back to Job
+      </a>
+    </div>
+  </div>
+{% endblock heading %}
+{% block content %}
+  <dl class="row">
+    <dt class="col-sm-3">Task ID</dt>
+    <dd class="col-sm-9">{{ task.id }}</dd>
+    <dt class="col-sm-3">Partition</dt>
+    <dd class="col-sm-9">{{ task.partition_key }}</dd>
+    <dt class="col-sm-3">Window</dt>
+    <dd class="col-sm-9">{{ task.partition_start }} – {{ task.partition_end }}</dd>
+    <dt class="col-sm-3">Status</dt>
+    <dd class="col-sm-9">
+      <span class="{{ task.status|dicom_task_status_css_class }}">
+        {{ task.get_status_display }}
+      </span>
+    </dd>
+    <dt class="col-sm-3">Message</dt>
+    <dd class="col-sm-9">{{ task.message|default:"—" }}</dd>
+    <dt class="col-sm-3">Log</dt>
+    <dd class="col-sm-9"><pre class="mb-0">{{ task.log|default:"" }}</pre></dd>
+  </dl>
+  <div class="mt-3">
+    {% crispy filter.form %}
+  </div>
+
+ {% render_table table %}
+ {% task_control_panel %}
+{% endblock content %}
diff --git a/adit/mass_transfer/templatetags/__init__.py b/adit/mass_transfer/templatetags/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/adit/mass_transfer/templatetags/mass_transfer_extras.py b/adit/mass_transfer/templatetags/mass_transfer_extras.py
new file mode 100644
index 000000000..9d846645b
--- /dev/null
+++ b/adit/mass_transfer/templatetags/mass_transfer_extras.py
@@ -0,0 +1,44 @@
+from typing import Any
+
+from django.template import Library
+
+from ..models import MassTransferVolume
+
+register = Library()
+
+
+@register.filter
+def volume_status_css_class(status: str) -> str:
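+    """Map a MassTransferVolume status to a Bootstrap text color CSS class."""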
+ css_classes = {
+ MassTransferVolume.Status.PENDING: "text-secondary",
+ MassTransferVolume.Status.EXPORTED: "text-info",
+ MassTransferVolume.Status.CONVERTED: "text-success",
+ MassTransferVolume.Status.SKIPPED: "text-muted",
+ MassTransferVolume.Status.ERROR: "text-danger",
+ }
+    try:
+        # Fall back to the default class for unknown or uncovered statuses
+        # (the original .get() default was unreachable because Status(status)
+        # raises ValueError before the lookup happens).
+        return css_classes[MassTransferVolume.Status(status)]
+    except (ValueError, KeyError):
+        return "text-secondary"
+
+
+@register.inclusion_tag("core/_job_detail_control_panel.html", takes_context=True)
+def job_control_panel(context: dict[str, Any]) -> dict[str, Any]:
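+    """Provide the mass_transfer-specific URL names to the shared job control panel."""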
+ return {
+ "job_delete_url": "mass_transfer_job_delete",
+ "job_verify_url": "mass_transfer_job_verify",
+ "job_cancel_url": "mass_transfer_job_cancel",
+ "job_resume_url": "mass_transfer_job_resume",
+ "job_retry_url": "mass_transfer_job_retry",
+ "job_restart_url": "mass_transfer_job_restart",
+ "user": context["user"],
+ "job": context["job"],
+ }
+
+
+@register.inclusion_tag("core/_task_detail_control_panel.html", takes_context=True)
+def task_control_panel(context: dict[str, Any]) -> dict[str, Any]:
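+    """Provide the mass_transfer-specific URL names to the shared task control panel."""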
+ return {
+ "task_delete_url": "mass_transfer_task_delete",
+ "task_reset_url": "mass_transfer_task_reset",
+ "task_kill_url": "mass_transfer_task_kill",
+ "user": context["user"],
+ "task": context["task"],
+ }
diff --git a/adit/mass_transfer/tests/__init__.py b/adit/mass_transfer/tests/__init__.py
new file mode 100644
index 000000000..9758d0118
--- /dev/null
+++ b/adit/mass_transfer/tests/__init__.py
@@ -0,0 +1 @@
+# Tests for mass_transfer app.
diff --git a/adit/mass_transfer/tests/acceptance/__init__.py b/adit/mass_transfer/tests/acceptance/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/adit/mass_transfer/tests/acceptance/conftest.py b/adit/mass_transfer/tests/acceptance/conftest.py
new file mode 100644
index 000000000..92e966529
--- /dev/null
+++ b/adit/mass_transfer/tests/acceptance/conftest.py
@@ -0,0 +1,5 @@
+import os
+
+# Workaround to make playwright work with Django
+# see https://github.com/microsoft/playwright-pytest/issues/29#issuecomment-731515676
+os.environ.setdefault("DJANGO_ALLOW_ASYNC_UNSAFE", "true")
diff --git a/adit/mass_transfer/tests/acceptance/test_mass_transfer.py b/adit/mass_transfer/tests/acceptance/test_mass_transfer.py
new file mode 100644
index 000000000..97de29edd
--- /dev/null
+++ b/adit/mass_transfer/tests/acceptance/test_mass_transfer.py
@@ -0,0 +1,201 @@
+import json
+import tempfile
+from pathlib import Path
+
+import nibabel as nib
+import pytest
+from adit_radis_shared.common.utils.testing_helpers import (
+ add_permission,
+ add_user_to_group,
+ create_and_login_example_user,
+ run_worker_once,
+)
+from playwright.sync_api import Page, expect
+from pytest_django.live_server_helper import LiveServer
+
+from adit.core.factories import DicomFolderFactory
+from adit.core.utils.auth_utils import grant_access
+from adit.core.utils.testing_helpers import setup_dicomweb_orthancs, setup_dimse_orthancs
+from adit.mass_transfer.models import MassTransferJob
+from adit.mass_transfer.utils.testing_helpers import create_mass_transfer_group
+
+FILTERS_CT_ONLY = json.dumps([{"modality": "CT"}])
+
+
+def _fill_mass_transfer_form(
+ page: Page,
+ *,
+ source_label: str = "DICOM Server Orthanc Test Server 1",
+ destination_label: str = "DICOM Server Orthanc Test Server 2",
+ start_date: str = "2018-08-20",
+ end_date: str = "2018-08-20",
+ pseudonymize: bool = True,
+ convert_to_nifti: bool = False,
+ filters_json: str = FILTERS_CT_ONLY,
+):
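+    """Fill out and submit the new mass transfer job form.
+
+    The defaults target the Orthanc test servers and can be overridden per test.
+    """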
+ page.get_by_label("Source").select_option(label=source_label)
+ page.get_by_label("Destination").select_option(label=destination_label)
+ page.get_by_label("Start date").fill(start_date)
+ page.get_by_label("End date").fill(end_date)
+
+ pseudonymize_checkbox = page.get_by_label("Pseudonymize")
+ if pseudonymize and not pseudonymize_checkbox.is_checked():
+ pseudonymize_checkbox.click(force=True)
+ elif not pseudonymize and pseudonymize_checkbox.is_checked():
+ pseudonymize_checkbox.click(force=True)
+
+ if convert_to_nifti:
+ page.get_by_label("Convert to NIfTI").click(force=True)
+
+ # Set filters in CodeMirror editor
+ page.evaluate(
+ """(value) => {
+ const cm = document.querySelector('.CodeMirror').CodeMirror;
+ cm.setValue(value);
+ }""",
+ filters_json,
+ )
+
+ page.locator('input:has-text("Create Job")').click()
+
+
+def _run_mass_transfer_workers():
+ # First run: processes queue_mass_transfer_tasks on default queue
+ run_worker_once()
+ # Second run: processes process_mass_transfer_task on mass_transfer queue
+ run_worker_once()
+
+
+def _setup_orthancs(transfer_protocol: str):
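+    """Set up a pair of Orthanc test servers for the given transfer protocol."""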
+ if transfer_protocol == "dicomweb":
+ return setup_dicomweb_orthancs()
+ elif transfer_protocol == "c-move":
+ return setup_dimse_orthancs(cget_enabled=False)
+ else:
+ return setup_dimse_orthancs()
+
+
+@pytest.mark.acceptance
+@pytest.mark.order("last")
+@pytest.mark.django_db(transaction=True)
+@pytest.mark.parametrize("transfer_protocol", ["c-move", "c-get", "dicomweb"])
+def test_unpseudonymized_mass_transfer_to_server(
+ page: Page, live_server: LiveServer, transfer_protocol: str
+):
+ user = create_and_login_example_user(page, live_server.url)
+ group = create_mass_transfer_group()
+ add_user_to_group(user, group)
+ add_permission(group, MassTransferJob, "can_transfer_unpseudonymized")
+
+ orthancs = _setup_orthancs(transfer_protocol)
+ grant_access(group, orthancs[0], source=True)
+ grant_access(group, orthancs[1], destination=True)
+
+ page.goto(live_server.url + "/mass-transfer/jobs/new/")
+ _fill_mass_transfer_form(page, pseudonymize=False)
+
+ _run_mass_transfer_workers()
+ page.reload()
+
+ expect(page.locator('dl:has-text("Success")')).to_be_visible()
+
+
+@pytest.mark.acceptance
+@pytest.mark.order("last")
+@pytest.mark.django_db(transaction=True)
+@pytest.mark.parametrize("transfer_protocol", ["c-move", "c-get", "dicomweb"])
+def test_pseudonymized_mass_transfer_to_server(
+ page: Page, live_server: LiveServer, transfer_protocol: str
+):
+ user = create_and_login_example_user(page, live_server.url)
+ group = create_mass_transfer_group()
+ add_user_to_group(user, group)
+
+ orthancs = _setup_orthancs(transfer_protocol)
+ grant_access(group, orthancs[0], source=True)
+ grant_access(group, orthancs[1], destination=True)
+
+ page.goto(live_server.url + "/mass-transfer/jobs/new/")
+ _fill_mass_transfer_form(page, pseudonymize=True)
+
+ _run_mass_transfer_workers()
+ page.reload()
+
+ expect(page.locator('dl:has-text("Success")')).to_be_visible()
+
+
+@pytest.mark.acceptance
+@pytest.mark.order("last")
+@pytest.mark.django_db(transaction=True)
+@pytest.mark.parametrize("transfer_protocol", ["c-move", "c-get", "dicomweb"])
+def test_mass_transfer_to_folder(
+ page: Page, live_server: LiveServer, transfer_protocol: str
+):
+ user = create_and_login_example_user(page, live_server.url)
+ group = create_mass_transfer_group()
+ add_user_to_group(user, group)
+ add_permission(group, MassTransferJob, "can_transfer_unpseudonymized")
+
+ orthancs = _setup_orthancs(transfer_protocol)
+ grant_access(group, orthancs[0], source=True)
+
+ with tempfile.TemporaryDirectory() as temp_dir:
+ download_folder = DicomFolderFactory.create(name="Downloads", path=temp_dir)
+ grant_access(group, download_folder, destination=True)
+
+ page.goto(live_server.url + "/mass-transfer/jobs/new/")
+ _fill_mass_transfer_form(
+ page,
+ destination_label="DICOM Folder Downloads",
+ pseudonymize=False,
+ )
+
+ _run_mass_transfer_workers()
+ page.reload()
+
+ expect(page.locator('dl:has-text("Success")')).to_be_visible()
+
+ # Verify DICOM files were written to disk
+ dcm_files = list(Path(temp_dir).glob("**/*.dcm"))
+ assert len(dcm_files) > 0, "No DICOM files were written to the output folder."
+
+
+@pytest.mark.acceptance
+@pytest.mark.order("last")
+@pytest.mark.django_db(transaction=True)
+@pytest.mark.parametrize("transfer_protocol", ["c-move", "c-get", "dicomweb"])
+def test_mass_transfer_to_folder_with_nifti_conversion(
+ page: Page, live_server: LiveServer, transfer_protocol: str
+):
+ user = create_and_login_example_user(page, live_server.url)
+ group = create_mass_transfer_group()
+ add_user_to_group(user, group)
+ add_permission(group, MassTransferJob, "can_transfer_unpseudonymized")
+
+ orthancs = _setup_orthancs(transfer_protocol)
+ grant_access(group, orthancs[0], source=True)
+
+ with tempfile.TemporaryDirectory() as temp_dir:
+ download_folder = DicomFolderFactory.create(name="Downloads", path=temp_dir)
+ grant_access(group, download_folder, destination=True)
+
+ page.goto(live_server.url + "/mass-transfer/jobs/new/")
+ _fill_mass_transfer_form(
+ page,
+ destination_label="DICOM Folder Downloads",
+ pseudonymize=False,
+ convert_to_nifti=True,
+ )
+
+ _run_mass_transfer_workers()
+ page.reload()
+
+ expect(page.locator('dl:has-text("Success")')).to_be_visible()
+
+ # Verify NIfTI files were generated
+ nifti_files = list(Path(temp_dir).glob("**/*.nii*"))
+ assert len(nifti_files) > 0, "No NIfTI files were generated."
+
+ for nifti_file in nifti_files:
+ img = nib.load(nifti_file) # type: ignore
+ assert img is not None, f"Invalid NIfTI file: {nifti_file}"
diff --git a/adit/mass_transfer/tests/test_forms.py b/adit/mass_transfer/tests/test_forms.py
new file mode 100644
index 000000000..d7db6f434
--- /dev/null
+++ b/adit/mass_transfer/tests/test_forms.py
@@ -0,0 +1,390 @@
+import json
+
+import pytest
+from adit_radis_shared.accounts.factories import GroupFactory, UserFactory
+from adit_radis_shared.common.utils.testing_helpers import add_user_to_group
+
+from adit.core.factories import DicomFolderFactory, DicomServerFactory
+from adit.core.utils.auth_utils import grant_access
+
+from ..forms import MassTransferJobForm
+
+
+@pytest.fixture
+def form_env():
+ """Create a user, source server, destination folder, and grant access."""
+ user = UserFactory.create()
+ source = DicomServerFactory.create()
+ destination = DicomFolderFactory.create()
+ group = GroupFactory.create()
+ add_user_to_group(user, group)
+ grant_access(group, source, source=True)
+ grant_access(group, destination, destination=True)
+ return {"user": user, "source": source, "destination": destination}
+
+
+def _make_form(form_env, **overrides):
+ """Build a MassTransferJobForm with sensible defaults, applying overrides."""
+ data = {
+ "source": form_env["source"].pk,
+ "destination": form_env["destination"].pk,
+ "start_date": "2024-01-01",
+ "end_date": "2024-01-03",
+ "partition_granularity": "daily",
+ "pseudonymize": False,
+ "pseudonym_salt": "",
+ "filters_json": json.dumps([{"modality": "CT"}]),
+ }
+ data.update(overrides)
+ return MassTransferJobForm(data=data, user=form_env["user"])
+
+
+@pytest.mark.django_db
+def test_clean_clears_salt_when_pseudonymize_unchecked():
+ """When pseudonymize is unchecked, the salt should be cleared."""
+ user = UserFactory.create()
+ source = DicomServerFactory.create()
+ destination = DicomFolderFactory.create()
+ group = GroupFactory.create()
+ add_user_to_group(user, group)
+ grant_access(group, source, source=True)
+ grant_access(group, destination, destination=True)
+
+ form = MassTransferJobForm(
+ data={
+ "source": source.pk,
+ "destination": destination.pk,
+ "start_date": "2024-01-01",
+ "end_date": "2024-01-03",
+ "partition_granularity": "daily",
+ "pseudonymize": False,
+ "pseudonym_salt": "should-be-cleared",
+ "filters_json": json.dumps([{"modality": "CT"}]),
+ },
+ user=user,
+ )
+ assert form.is_valid(), form.errors
+ assert form.cleaned_data["pseudonym_salt"] == ""
+
+
+@pytest.mark.django_db
+def test_clean_keeps_salt_when_pseudonymize_checked():
+ """When pseudonymize is checked, the salt should be preserved."""
+ user = UserFactory.create()
+ source = DicomServerFactory.create()
+ destination = DicomFolderFactory.create()
+ group = GroupFactory.create()
+ add_user_to_group(user, group)
+ grant_access(group, source, source=True)
+ grant_access(group, destination, destination=True)
+
+ form = MassTransferJobForm(
+ data={
+ "source": source.pk,
+ "destination": destination.pk,
+ "start_date": "2024-01-01",
+ "end_date": "2024-01-03",
+ "partition_granularity": "daily",
+ "pseudonymize": True,
+ "pseudonym_salt": "my-custom-salt",
+ "filters_json": json.dumps([{"modality": "CT"}]),
+ },
+ user=user,
+ )
+ assert form.is_valid(), form.errors
+ assert form.cleaned_data["pseudonym_salt"] == "my-custom-salt"
+
+
+@pytest.mark.django_db
+def test_clean_allows_empty_salt_with_pseudonymize_for_random_mode():
+ """Pseudonymize checked with empty salt = random pseudonyms (no linking)."""
+ user = UserFactory.create()
+ source = DicomServerFactory.create()
+ destination = DicomFolderFactory.create()
+ group = GroupFactory.create()
+ add_user_to_group(user, group)
+ grant_access(group, source, source=True)
+ grant_access(group, destination, destination=True)
+
+ form = MassTransferJobForm(
+ data={
+ "source": source.pk,
+ "destination": destination.pk,
+ "start_date": "2024-01-01",
+ "end_date": "2024-01-03",
+ "partition_granularity": "daily",
+ "pseudonymize": True,
+ "pseudonym_salt": "",
+ "filters_json": json.dumps([{"modality": "CT"}]),
+ },
+ user=user,
+ )
+ assert form.is_valid(), form.errors
+ assert form.cleaned_data["pseudonym_salt"] == ""
+ assert form.cleaned_data["pseudonymize"] is True
+
+
+@pytest.mark.django_db
+def test_clean_destination_accepts_server():
+ """Server destinations should be accepted."""
+ user = UserFactory.create()
+ source = DicomServerFactory.create()
+ destination = DicomServerFactory.create()
+ group = GroupFactory.create()
+ add_user_to_group(user, group)
+ grant_access(group, source, source=True)
+ grant_access(group, destination, destination=True)
+
+ form = MassTransferJobForm(
+ data={
+ "source": source.pk,
+ "destination": destination.pk,
+ "start_date": "2024-01-01",
+ "end_date": "2024-01-03",
+ "partition_granularity": "daily",
+ "pseudonymize": False,
+ "pseudonym_salt": "",
+ "filters_json": json.dumps([{"modality": "CT"}]),
+ },
+ user=user,
+ )
+ assert form.is_valid(), form.errors
+
+
+@pytest.mark.django_db
+def test_clean_destination_accepts_folder():
+ """Folder destinations should still be accepted (regression guard)."""
+ user = UserFactory.create()
+ source = DicomServerFactory.create()
+ destination = DicomFolderFactory.create()
+ group = GroupFactory.create()
+ add_user_to_group(user, group)
+ grant_access(group, source, source=True)
+ grant_access(group, destination, destination=True)
+
+ form = MassTransferJobForm(
+ data={
+ "source": source.pk,
+ "destination": destination.pk,
+ "start_date": "2024-01-01",
+ "end_date": "2024-01-03",
+ "partition_granularity": "daily",
+ "pseudonymize": False,
+ "pseudonym_salt": "",
+ "filters_json": json.dumps([{"modality": "CT"}]),
+ },
+ user=user,
+ )
+ assert form.is_valid(), form.errors
+
+
+@pytest.mark.django_db
+def test_clean_clears_nifti_with_server_destination():
+ """NIfTI conversion should be silently cleared when destination is a server."""
+ user = UserFactory.create()
+ source = DicomServerFactory.create()
+ destination = DicomServerFactory.create()
+ group = GroupFactory.create()
+ add_user_to_group(user, group)
+ grant_access(group, source, source=True)
+ grant_access(group, destination, destination=True)
+
+ form = MassTransferJobForm(
+ data={
+ "source": source.pk,
+ "destination": destination.pk,
+ "start_date": "2024-01-01",
+ "end_date": "2024-01-03",
+ "partition_granularity": "daily",
+ "pseudonymize": False,
+ "pseudonym_salt": "",
+ "convert_to_nifti": True,
+ "filters_json": json.dumps([{"modality": "CT"}]),
+ },
+ user=user,
+ )
+ assert form.is_valid(), form.errors
+ assert form.cleaned_data["convert_to_nifti"] is False
+
+
+@pytest.mark.django_db
+def test_clean_allows_nifti_with_folder_destination():
+ """NIfTI conversion should be allowed when destination is a folder."""
+ user = UserFactory.create()
+ source = DicomServerFactory.create()
+ destination = DicomFolderFactory.create()
+ group = GroupFactory.create()
+ add_user_to_group(user, group)
+ grant_access(group, source, source=True)
+ grant_access(group, destination, destination=True)
+
+ form = MassTransferJobForm(
+ data={
+ "source": source.pk,
+ "destination": destination.pk,
+ "start_date": "2024-01-01",
+ "end_date": "2024-01-03",
+ "partition_granularity": "daily",
+ "pseudonymize": False,
+ "pseudonym_salt": "",
+ "convert_to_nifti": True,
+ "filters_json": json.dumps([{"modality": "CT"}]),
+ },
+ user=user,
+ )
+ assert form.is_valid(), form.errors
+ assert form.cleaned_data["convert_to_nifti"] is True
+
+
+# --- clean_filters_json tests ---
+
+
+@pytest.mark.django_db
+def test_clean_filters_json_invalid_json(form_env):
+ form = _make_form(form_env, filters_json="{not valid json")
+ assert not form.is_valid()
+ assert "filters_json" in form.errors
+
+
+@pytest.mark.django_db
+def test_clean_filters_json_non_array(form_env):
+ form = _make_form(form_env, filters_json=json.dumps({"modality": "CT"}))
+ assert not form.is_valid()
+ assert "filters_json" in form.errors
+
+
+@pytest.mark.django_db
+def test_clean_filters_json_empty_array(form_env):
+ form = _make_form(form_env, filters_json=json.dumps([]))
+ assert not form.is_valid()
+ assert "filters_json" in form.errors
+
+
+@pytest.mark.django_db
+def test_clean_filters_json_non_dict_item(form_env):
+ form = _make_form(form_env, filters_json=json.dumps(["not a dict"]))
+ assert not form.is_valid()
+ assert "filters_json" in form.errors
+
+
+@pytest.mark.django_db
+def test_clean_filters_json_negative_age(form_env):
+ form = _make_form(form_env, filters_json=json.dumps([{"min_age": -5}]))
+ assert not form.is_valid()
+ assert "filters_json" in form.errors
+
+
+@pytest.mark.django_db
+def test_clean_filters_json_inverted_age_range(form_env):
+ form = _make_form(form_env, filters_json=json.dumps([{"min_age": 90, "max_age": 18}]))
+ assert not form.is_valid()
+ assert "filters_json" in form.errors
+
+
+@pytest.mark.django_db
+def test_clean_filters_json_extra_fields(form_env):
+ form = _make_form(
+ form_env, filters_json=json.dumps([{"modality": "CT", "unknown_field": "x"}])
+ )
+ assert not form.is_valid()
+ assert "filters_json" in form.errors
+
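+# Taken together, the tests above pin down the assumed filters_json contract:
+# a non-empty JSON array of objects, each restricted to known filter keys,
+# with non-negative ages and min_age <= max_age. A payload that passes all of
+# these rules (hypothetical values):
+#
+#     json.dumps([{"modality": "CT", "min_age": 18, "max_age": 90}])
+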
+
+# --- clean / clean_source validation tests ---
+
+
+@pytest.mark.django_db
+def test_clean_rejects_end_date_before_start_date(form_env):
+ form = _make_form(form_env, start_date="2024-06-01", end_date="2024-01-01")
+ assert not form.is_valid()
+ assert "__all__" in form.errors
+
+
+@pytest.mark.django_db
+def test_clean_source_rejects_folder():
+ user = UserFactory.create()
+ source_folder = DicomFolderFactory.create()
+ destination = DicomFolderFactory.create()
+ group = GroupFactory.create()
+ add_user_to_group(user, group)
+ grant_access(group, source_folder, source=True)
+ grant_access(group, destination, destination=True)
+
+ form = MassTransferJobForm(
+ data={
+ "source": source_folder.pk,
+ "destination": destination.pk,
+ "start_date": "2024-01-01",
+ "end_date": "2024-01-03",
+ "partition_granularity": "daily",
+ "pseudonymize": False,
+ "pseudonym_salt": "",
+ "filters_json": json.dumps([{"modality": "CT"}]),
+ },
+ user=user,
+ )
+ assert not form.is_valid()
+ assert "source" in form.errors
+
+
+@pytest.mark.django_db
+def test_clean_filters_json_min_instances_valid(form_env):
+ form = _make_form(
+ form_env,
+ filters_json=json.dumps([{"modality": "CT", "min_number_of_series_related_instances": 5}]),
+ )
+ assert form.is_valid(), form.errors
+
+
+@pytest.mark.django_db
+def test_clean_filters_json_min_instances_zero_rejected(form_env):
+ form = _make_form(
+ form_env,
+ filters_json=json.dumps([{"modality": "CT", "min_number_of_series_related_instances": 0}]),
+ )
+ assert not form.is_valid()
+ assert "filters_json" in form.errors
+
+
+@pytest.mark.django_db
+def test_clean_filters_json_min_instances_null_accepted(form_env):
+ form = _make_form(
+ form_env,
+ filters_json=json.dumps(
+ [{"modality": "CT", "min_number_of_series_related_instances": None}]
+ ),
+ )
+ assert form.is_valid(), form.errors
+
+
+@pytest.mark.django_db
+def test_clean_filters_json_min_instances_omitted_accepted(form_env):
+ form = _make_form(form_env, filters_json=json.dumps([{"modality": "CT"}]))
+ assert form.is_valid(), form.errors
+
+
+@pytest.mark.django_db
+def test_clean_source_rejects_unauthorized_server():
+ user = UserFactory.create()
+ source = DicomServerFactory.create()
+ destination = DicomFolderFactory.create()
+ group = GroupFactory.create()
+ add_user_to_group(user, group)
+ # Only grant destination access, not source
+ grant_access(group, destination, destination=True)
+
+ form = MassTransferJobForm(
+ data={
+ "source": source.pk,
+ "destination": destination.pk,
+ "start_date": "2024-01-01",
+ "end_date": "2024-01-03",
+ "partition_granularity": "daily",
+ "pseudonymize": False,
+ "pseudonym_salt": "",
+ "filters_json": json.dumps([{"modality": "CT"}]),
+ },
+ user=user,
+ )
+ assert not form.is_valid()
+ assert "source" in form.errors
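+
+
+# Assumed shape of the source checks the two clean_source tests pin down
+# (sketch only; helper names are illustrative, not the actual form code):
+#
+#     def clean_source(self):
+#         source = self.cleaned_data["source"]
+#         if source.node_type != DicomNode.NodeType.SERVER:
+#             raise ValidationError("Source must be a DICOM server.")
+#         if source not in accessible_sources_for(self.user):  # hypothetical helper
+#             raise ValidationError("Source not accessible by this user.")
+#         return source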
diff --git a/adit/mass_transfer/tests/test_partitions.py b/adit/mass_transfer/tests/test_partitions.py
new file mode 100644
index 000000000..f82198c55
--- /dev/null
+++ b/adit/mass_transfer/tests/test_partitions.py
@@ -0,0 +1,26 @@
+from datetime import date
+
+from adit.mass_transfer.utils.partitions import build_partitions
+
+
+def test_build_partitions_daily():
+ windows = build_partitions(date(2024, 1, 1), date(2024, 1, 3), "daily")
+
+ assert len(windows) == 3
+ assert [window.key for window in windows] == ["20240101", "20240102", "20240103"]
+ assert windows[0].start.hour == 0
+ assert windows[0].start.minute == 0
+ assert windows[0].end.hour == 23
+ assert windows[0].end.minute == 59
+ assert windows[0].end.second == 59
+
+
+def test_build_partitions_weekly():
+ windows = build_partitions(date(2024, 1, 1), date(2024, 1, 10), "weekly")
+
+ assert len(windows) == 2
+ assert [window.key for window in windows] == ["20240101-20240107", "20240108-20240110"]
+ assert windows[0].start.date() == date(2024, 1, 1)
+ assert windows[0].end.date() == date(2024, 1, 7)
+ assert windows[1].start.date() == date(2024, 1, 8)
+ assert windows[1].end.date() == date(2024, 1, 10)
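+
+
+# Assumed PartitionWindow contract exercised above (a sketch; field names are
+# taken from the assertions): each window carries a key plus inclusive
+# start/end datetimes, e.g.
+#
+#     PartitionWindow(key="20240101",
+#                     start=datetime(2024, 1, 1, 0, 0, 0),
+#                     end=datetime(2024, 1, 1, 23, 59, 59))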
diff --git a/adit/mass_transfer/tests/test_processor.py b/adit/mass_transfer/tests/test_processor.py
new file mode 100644
index 000000000..8af97febf
--- /dev/null
+++ b/adit/mass_transfer/tests/test_processor.py
@@ -0,0 +1,1961 @@
+from datetime import date, datetime, timedelta
+from pathlib import Path
+from types import SimpleNamespace
+from unittest.mock import MagicMock
+
+import pytest
+from adit_radis_shared.accounts.factories import UserFactory
+from django.utils import timezone
+from pydicom import Dataset
+from pytest_mock import MockerFixture
+
+from adit.core.errors import DicomError, RetriableDicomError
+from adit.core.factories import DicomFolderFactory, DicomServerFactory
+from adit.core.models import DicomNode
+from adit.core.utils.dicom_dataset import ResultDataset
+from adit.core.utils.dicom_operator import DicomOperator
+from adit.mass_transfer.models import (
+ MassTransferJob,
+ MassTransferSettings,
+ MassTransferTask,
+ MassTransferVolume,
+)
+from adit.mass_transfer.processors import (
+ DiscoveredSeries,
+ FilterSpec,
+ MassTransferTaskProcessor,
+ _age_at_study,
+ _birth_date_range,
+ _destination_base_dir,
+ _dicom_match,
+ _parse_int,
+ _series_folder_name,
+ _study_datetime,
+ _study_folder_name,
+)
+
+
+def _make_study(study_uid: str, study_date: str = "20240101") -> ResultDataset:
+ ds = Dataset()
+ ds.StudyInstanceUID = study_uid
+ ds.StudyDate = study_date
+ ds.StudyTime = "120000"
+ ds.PatientID = "PAT1"
+ ds.ModalitiesInStudy = ["CT"]
+ return ResultDataset(ds)
+
+
+def _fake_export_success(*args, **kwargs):
+ """Stub for _export_series that simulates a successful single-image export."""
+ return (1, "", "")
+
+
+def _make_discovered(
+ *,
+ patient_id: str = "PAT1",
+ study_uid: str = "study-1",
+ series_uid: str = "series-1",
+ modality: str = "CT",
+ study_description: str = "Brain CT",
+ series_description: str = "Axial",
+ series_number: int | None = 1,
+ study_datetime: datetime | None = None,
+) -> DiscoveredSeries:
+ return DiscoveredSeries(
+ patient_id=patient_id,
+ accession_number="ACC001",
+ study_instance_uid=study_uid,
+ series_instance_uid=series_uid,
+ modality=modality,
+ study_description=study_description,
+ series_description=series_description,
+ series_number=series_number,
+ study_datetime=study_datetime or datetime(2024, 1, 1, 12, 0),
+ institution_name="Radiology",
+ number_of_images=10,
+ )
+
+
+# ---------------------------------------------------------------------------
+# _find_studies tests
+# ---------------------------------------------------------------------------
+
+
+def _make_processor(mocker: MockerFixture) -> MassTransferTaskProcessor:
+ mock_task = mocker.MagicMock(spec=MassTransferTask)
+ mock_task._meta = MassTransferTask._meta
+ mocker.patch.object(MassTransferTaskProcessor, "__init__", return_value=None)
+ processor = MassTransferTaskProcessor.__new__(MassTransferTaskProcessor)
+ processor.dicom_task = mock_task
+ processor.mass_task = mock_task
+ return processor
+
+
+def _make_filter(**kwargs) -> FilterSpec:
+ return FilterSpec(
+ modality=kwargs.get("modality", "CT"),
+ study_description=kwargs.get("study_description", ""),
+ institution_name=kwargs.get("institution_name", ""),
+ apply_institution_on_study=kwargs.get("apply_institution_on_study", True),
+ series_description=kwargs.get("series_description", ""),
+ series_number=kwargs.get("series_number", None),
+ min_age=kwargs.get("min_age", None),
+ max_age=kwargs.get("max_age", None),
+ min_number_of_series_related_instances=kwargs.get(
+ "min_number_of_series_related_instances", None
+ ),
+ )
+
+
+@pytest.fixture
+def mass_transfer_env(tmp_path):
+ """Common setup for DB integration tests: settings, user, source, folder dest, job, task."""
+ MassTransferSettings.objects.create()
+ user = UserFactory.create()
+ source = DicomServerFactory.create()
+ destination = DicomFolderFactory.create(path=str(tmp_path / "output"))
+ job = MassTransferJob.objects.create(
+ owner=user,
+ start_date=date(2024, 1, 1),
+ end_date=date(2024, 1, 1),
+ partition_granularity=MassTransferJob.PartitionGranularity.DAILY,
+ pseudonymize=False,
+ pseudonym_salt="",
+ )
+ job.filters_json = [{"modality": "CT"}]
+ job.save(update_fields=["filters_json"])
+ now = timezone.now()
+ task = MassTransferTask.objects.create(
+ job=job,
+ source=source,
+ destination=destination,
+ patient_id="",
+ study_uid="",
+ partition_start=now,
+ partition_end=now + timedelta(hours=23, minutes=59, seconds=59),
+ partition_key="20240101",
+ )
+ return SimpleNamespace(job=job, task=task, source=source, destination=destination, user=user)
+
+
+@pytest.mark.django_db
+def test_find_studies_raises_when_time_window_too_small(mocker: MockerFixture):
+ MassTransferSettings.objects.create()
+
+ user = UserFactory.create()
+ source = DicomServerFactory.create(max_search_results=1)
+ destination = DicomFolderFactory.create()
+ job = MassTransferJob.objects.create(
+ owner=user,
+ start_date=date(2024, 1, 1),
+ end_date=date(2024, 1, 1),
+ partition_granularity=MassTransferJob.PartitionGranularity.DAILY,
+ )
+ job.filters_json = [{"modality": "CT"}]
+ job.save(update_fields=["filters_json"])
+
+ start = timezone.now()
+ end = start + timedelta(minutes=10)
+ task = MassTransferTask.objects.create(
+ job=job,
+ source=source,
+ destination=destination,
+ patient_id="",
+ study_uid="",
+ partition_start=start,
+ partition_end=end,
+ partition_key="20240101",
+ )
+
+ processor = MassTransferTaskProcessor(task)
+ operator = mocker.create_autospec(DicomOperator)
+ operator.server = source
+ operator.find_studies.return_value = [object(), object()]
+
+ mf = FilterSpec(modality="CT")
+ with pytest.raises(DicomError, match="Time window too small"):
+ processor._find_studies(operator, mf, start, end)
+
+
+def test_find_studies_returns_all_when_under_limit(mocker: MockerFixture):
+ processor = _make_processor(mocker)
+ mf = _make_filter(modality="CT")
+
+ start = datetime(2024, 1, 1, 0, 0, 0)
+ end = datetime(2024, 1, 1, 23, 59, 59)
+
+ studies = [_make_study("1.2.3"), _make_study("1.2.4"), _make_study("1.2.5")]
+
+ operator = mocker.create_autospec(DicomOperator)
+ operator.server = mocker.MagicMock(max_search_results=10)
+ operator.find_studies.return_value = studies
+
+ result = processor._find_studies(operator, mf, start, end)
+
+ assert len(result) == 3
+ assert operator.find_studies.call_count == 1
+
+
+def test_find_studies_splits_and_deduplicates(mocker: MockerFixture):
+ processor = _make_processor(mocker)
+ mf = _make_filter(modality="CT")
+
+ # Use a single-day range to test the time-based midpoint split
+ start = datetime(2024, 1, 1, 0, 0, 0)
+ end = datetime(2024, 1, 1, 23, 59, 59)
+
+ study_a = _make_study("1.2.100")
+ study_b = _make_study("1.2.200")
+ study_c = _make_study("1.2.300")
+ study_a_dup = _make_study("1.2.100")
+
+ operator = mocker.create_autospec(DicomOperator)
+ operator.server = mocker.MagicMock(max_search_results=2)
+ operator.find_studies.side_effect = [
+ [study_a, study_b, study_c],
+ [study_a, study_b],
+ [study_a_dup, study_c],
+ ]
+
+ result = processor._find_studies(operator, mf, start, end)
+
+ result_uids = [str(s.StudyInstanceUID) for s in result]
+ assert len(result) == 3
+ assert result_uids.count("1.2.100") == 1
+ assert "1.2.200" in result_uids
+ assert "1.2.300" in result_uids
+
+
+def test_find_studies_split_boundaries_dont_overlap(mocker: MockerFixture):
+ processor = _make_processor(mocker)
+ mf = _make_filter(modality="")
+
+ # Use a single-day range so we test the time-based midpoint split
+ start = datetime(2024, 1, 1, 0, 0, 0)
+ end = datetime(2024, 1, 1, 23, 59, 59)
+
+ call_ranges: list[tuple[datetime, datetime]] = []
+ original_find_studies = MassTransferTaskProcessor._find_studies
+
+ def tracking_find_studies(self_inner, operator, mf, s, e):
+ call_ranges.append((s, e))
+ return original_find_studies(self_inner, operator, mf, s, e)
+
+ operator = mocker.create_autospec(DicomOperator)
+ operator.server = mocker.MagicMock(max_search_results=1)
+ operator.find_studies.side_effect = [
+ [_make_study("1"), _make_study("2")],
+ [_make_study("1")],
+ [_make_study("2")],
+ ]
+
+    mocker.patch.object(
+        MassTransferTaskProcessor,
+        "_find_studies",
+        side_effect=tracking_find_studies,
+        autospec=True,
+    )
+
+ processor._find_studies(operator, mf, start, end)
+
+ assert len(call_ranges) == 3
+ left_start, left_end = call_ranges[1]
+ right_start, right_end = call_ranges[2]
+
+ assert left_start == start
+ assert right_start > left_end
+
+
+def test_find_studies_same_day_split_narrows_study_time(mocker: MockerFixture):
+ """When splitting within a single day, StudyTime must narrow to avoid infinite recursion."""
+ processor = _make_processor(mocker)
+ mf = _make_filter(modality="CT")
+
+ start = datetime(2024, 1, 1, 8, 0, 0)
+ end = datetime(2024, 1, 1, 20, 0, 0)
+
+ operator = mocker.create_autospec(DicomOperator)
+ operator.server = mocker.MagicMock(max_search_results=1)
+ # First call returns too many results (triggers split), sub-calls return under limit
+ operator.find_studies.side_effect = [
+ [_make_study("1"), _make_study("2")],
+ [_make_study("1")],
+ [_make_study("2")],
+ ]
+
+ processor._find_studies(operator, mf, start, end)
+
+ # 3 calls: initial + left half + right half
+ assert operator.find_studies.call_count == 3
+
+ queries = [call.args[0] for call in operator.find_studies.call_args_list]
+ initial_time = queries[0].dataset.StudyTime
+ left_time = queries[1].dataset.StudyTime
+ right_time = queries[2].dataset.StudyTime
+
+ # Initial query should use the actual start/end times
+ assert "080000" in initial_time
+ assert "200000" in initial_time
+
+ # Sub-queries should have narrower time ranges than the initial query
+ assert left_time != initial_time
+ assert right_time != initial_time
+
+
+def test_find_studies_cross_midnight_splits_at_midnight(mocker: MockerFixture):
+ """A cross-midnight window must split at midnight, not at the midpoint."""
+ processor = _make_processor(mocker)
+ mf = _make_filter(modality="CT")
+
+ # Window spans midnight: Jan 1 23:45 to Jan 2 00:15
+ start = datetime(2024, 1, 1, 23, 45, 0)
+ end = datetime(2024, 1, 2, 0, 15, 0)
+
+ operator = mocker.create_autospec(DicomOperator)
+ operator.server = mocker.MagicMock(max_search_results=200)
+ # Two sub-queries: before midnight and after midnight
+ operator.find_studies.side_effect = [
+ [_make_study("1")],
+ [_make_study("2")],
+ ]
+
+ result = processor._find_studies(operator, mf, start, end)
+
+ assert len(result) == 2
+ assert operator.find_studies.call_count == 2
+
+ # Verify the queries use single-day ranges with proper times
+ q1 = operator.find_studies.call_args_list[0].args[0]
+ q2 = operator.find_studies.call_args_list[1].args[0]
+ assert "234500" in q1.dataset.StudyTime
+ assert "235959" in q1.dataset.StudyTime
+ assert "000000" in q2.dataset.StudyTime
+ assert "001500" in q2.dataset.StudyTime
+
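+# Splitting strategy assumed by the tests above (a sketch, not the real
+# implementation): a window crossing midnight splits at the day boundary,
+# while a same-day window splits at its temporal midpoint and narrows
+# StudyTime so the recursion terminates. Roughly:
+#
+#     mid = start + (end - start) / 2
+#     left = self._find_studies(operator, mf, start, mid)
+#     right = self._find_studies(operator, mf, mid + timedelta(seconds=1), end)
+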
+
+def test_find_studies_preserves_order_with_unique_studies(mocker: MockerFixture):
+ processor = _make_processor(mocker)
+ mf = _make_filter(modality="")
+
+ start = datetime(2024, 1, 1, 0, 0, 0)
+ end = datetime(2024, 1, 1, 23, 59, 59)
+
+ operator = mocker.create_autospec(DicomOperator)
+ operator.server = mocker.MagicMock(max_search_results=2)
+ operator.find_studies.side_effect = [
+ [_make_study("1.2.1"), _make_study("1.2.2"), _make_study("1.2.3")],
+ [_make_study("1.2.1"), _make_study("1.2.2")],
+ [_make_study("1.2.2"), _make_study("1.2.3")],
+ ]
+
+ result = processor._find_studies(operator, mf, start, end)
+
+ result_uids = [str(s.StudyInstanceUID) for s in result]
+ assert result_uids == ["1.2.1", "1.2.2", "1.2.3"]
+
+
+# ---------------------------------------------------------------------------
+# _discover_series tests
+# ---------------------------------------------------------------------------
+
+
+def _make_series_result(
+ series_uid: str,
+ modality: str = "CT",
+ series_description: str = "Axial",
+ series_number: int = 1,
+ institution_name: str = "Radiology",
+ num_images: int = 10,
+) -> ResultDataset:
+ ds = Dataset()
+ ds.SeriesInstanceUID = series_uid
+ ds.Modality = modality
+ ds.SeriesDescription = series_description
+ ds.SeriesNumber = series_number
+ ds.InstitutionName = institution_name
+ ds.NumberOfSeriesRelatedInstances = num_images
+ return ResultDataset(ds)
+
+
+def test_discover_series_filters_by_modality(mocker: MockerFixture):
+ processor = _make_processor(mocker)
+ processor.mass_task.partition_start = datetime(2024, 1, 1, 0, 0)
+ processor.mass_task.partition_end = datetime(2024, 1, 1, 23, 59, 59)
+
+ operator = mocker.create_autospec(DicomOperator)
+ operator.server = mocker.MagicMock(max_search_results=200)
+
+ study = _make_study("1.2.3.100")
+ study.dataset.ModalitiesInStudy = ["CT", "MR"]
+ operator.find_studies.return_value = [study]
+
+ ct_series = _make_series_result("1.2.3.201", modality="CT")
+ mr_series = _make_series_result("1.2.3.202", modality="MR")
+ operator.find_series.return_value = [ct_series, mr_series]
+
+ # Filter for MR only
+ filters = [_make_filter(modality="MR")]
+ result = processor._discover_series(operator, filters)
+
+ assert len(result) == 1
+ assert result[0].series_instance_uid == "1.2.3.202"
+
+
+def test_discover_series_deduplicates_across_filters(mocker: MockerFixture):
+ processor = _make_processor(mocker)
+ processor.mass_task.partition_start = datetime(2024, 1, 1, 0, 0)
+ processor.mass_task.partition_end = datetime(2024, 1, 1, 23, 59, 59)
+
+ operator = mocker.create_autospec(DicomOperator)
+ operator.server = mocker.MagicMock(max_search_results=200)
+
+ study = _make_study("1.2.3.100")
+ study.dataset.ModalitiesInStudy = ["CT"]
+ operator.find_studies.return_value = [study]
+
+ series = _make_series_result("1.2.3.301", modality="CT")
+ operator.find_series.return_value = [series]
+
+ # Two filters that both match the same series
+ filters = [_make_filter(modality="CT"), _make_filter(modality="CT")]
+ result = processor._discover_series(operator, filters)
+
+ assert len(result) == 1
+
+
+def test_discover_series_filters_by_series_description(mocker: MockerFixture):
+ processor = _make_processor(mocker)
+ processor.mass_task.partition_start = datetime(2024, 1, 1, 0, 0)
+ processor.mass_task.partition_end = datetime(2024, 1, 1, 23, 59, 59)
+
+ operator = mocker.create_autospec(DicomOperator)
+ operator.server = mocker.MagicMock(max_search_results=200)
+
+ study = _make_study("1.2.3.100")
+ study.dataset.ModalitiesInStudy = ["CT"]
+ operator.find_studies.return_value = [study]
+
+ axial = _make_series_result("1.2.3.401", series_description="Axial T1")
+ sagittal = _make_series_result("1.2.3.402", series_description="Sagittal T2")
+ operator.find_series.return_value = [axial, sagittal]
+
+ filters = [_make_filter(modality="CT", series_description="Axial*")]
+ result = processor._discover_series(operator, filters)
+
+ assert len(result) == 1
+ assert result[0].series_instance_uid == "1.2.3.401"
+
+
+def test_discover_series_filters_by_min_instances(mocker: MockerFixture):
+ processor = _make_processor(mocker)
+ processor.mass_task.partition_start = datetime(2024, 1, 1, 0, 0)
+ processor.mass_task.partition_end = datetime(2024, 1, 1, 23, 59, 59)
+
+ operator = mocker.create_autospec(DicomOperator)
+ operator.server = mocker.MagicMock(max_search_results=200)
+
+ study = _make_study("1.2.3.100")
+ study.dataset.ModalitiesInStudy = ["CT"]
+ operator.find_studies.return_value = [study]
+
+ big_series = _make_series_result("1.2.3.501", num_images=10)
+ small_series = _make_series_result("1.2.3.502", num_images=2)
+ operator.find_series.return_value = [big_series, small_series]
+
+ filters = [_make_filter(modality="CT", min_number_of_series_related_instances=5)]
+ result = processor._discover_series(operator, filters)
+
+ assert len(result) == 1
+ assert result[0].series_instance_uid == "1.2.3.501"
+
+
+def test_discover_series_no_min_instances_filter_includes_all(mocker: MockerFixture):
+ processor = _make_processor(mocker)
+ processor.mass_task.partition_start = datetime(2024, 1, 1, 0, 0)
+ processor.mass_task.partition_end = datetime(2024, 1, 1, 23, 59, 59)
+
+ operator = mocker.create_autospec(DicomOperator)
+ operator.server = mocker.MagicMock(max_search_results=200)
+
+ study = _make_study("1.2.3.100")
+ study.dataset.ModalitiesInStudy = ["CT"]
+ operator.find_studies.return_value = [study]
+
+ big_series = _make_series_result("1.2.3.501", num_images=10)
+ small_series = _make_series_result("1.2.3.502", num_images=2)
+ operator.find_series.return_value = [big_series, small_series]
+
+ filters = [_make_filter(modality="CT")] # no min_number_of_series_related_instances
+ result = processor._discover_series(operator, filters)
+
+ assert len(result) == 2
+
+
+# ---------------------------------------------------------------------------
+# process() tests — mocked environment
+# ---------------------------------------------------------------------------
+
+
+def _make_process_env(
+ mocker: MockerFixture,
+ tmp_path: Path,
+ *,
+ convert_to_nifti: bool = False,
+ pseudonymize: bool = True,
+ pseudonym_salt: str = "test-salt-for-deterministic-pseudonyms",
+) -> MassTransferTaskProcessor:
+ processor = _make_processor(mocker)
+
+ mock_job = processor.mass_task.job
+ mock_job.convert_to_nifti = convert_to_nifti
+ mock_job.pseudonymize = pseudonymize
+ mock_job.pseudonym_salt = pseudonym_salt
+ mock_job.filters_json = [{"modality": "CT"}]
+ mock_job.get_filters.return_value = [FilterSpec.from_dict({"modality": "CT"})]
+
+ processor.mass_task.source.node_type = DicomNode.NodeType.SERVER
+ processor.mass_task.source.dicomserver = mocker.MagicMock()
+ processor.mass_task.destination.node_type = DicomNode.NodeType.FOLDER
+ processor.mass_task.destination.dicomfolder.path = str(tmp_path / "output")
+
+ processor.mass_task.pk = 42
+ processor.mass_task.partition_key = "20240101"
+
+ mocker.patch.object(processor, "is_suspended", return_value=False)
+ mocker.patch("adit.mass_transfer.processors.DicomOperator")
+
+ # Mock DB operations used by the processor
+ mocker.patch.object(
+ MassTransferVolume.objects,
+ "filter",
+ return_value=mocker.MagicMock(delete=mocker.MagicMock()),
+ )
+ mocker.patch.object(
+ MassTransferVolume.objects,
+ "bulk_create",
+ side_effect=lambda objs: objs,
+ )
+ mocker.patch.object(MassTransferVolume, "save")
+
+ return processor
+
+
+def _make_process_env_server_dest(
+ mocker: MockerFixture,
+ *,
+ pseudonymize: bool = True,
+ pseudonym_salt: str = "test-salt-for-deterministic-pseudonyms",
+ dest_operator: MagicMock | None = None,
+) -> tuple[MassTransferTaskProcessor, MagicMock]:
+ """Set up a processor with a SERVER destination.
+
+ Returns (processor, dest_operator_mock).
+ """
+ processor = _make_processor(mocker)
+
+ mock_job = processor.mass_task.job
+ mock_job.convert_to_nifti = False
+ mock_job.pseudonymize = pseudonymize
+ mock_job.pseudonym_salt = pseudonym_salt
+ mock_job.filters_json = [{"modality": "CT"}]
+ mock_job.get_filters.return_value = [FilterSpec.from_dict({"modality": "CT"})]
+
+ processor.mass_task.source.node_type = DicomNode.NodeType.SERVER
+ processor.mass_task.source.dicomserver = mocker.MagicMock()
+ processor.mass_task.destination.node_type = DicomNode.NodeType.SERVER
+ processor.mass_task.destination.dicomserver = mocker.MagicMock()
+
+ processor.mass_task.pk = 42
+ processor.mass_task.partition_key = "20240101"
+
+ mocker.patch.object(processor, "is_suspended", return_value=False)
+
+ source_mock = mocker.MagicMock()
+ if dest_operator is None:
+ dest_operator = mocker.MagicMock()
+ # dest DicomOperator is created first, source second
+ mocker.patch(
+ "adit.mass_transfer.processors.DicomOperator",
+ side_effect=[dest_operator, source_mock],
+ )
+
+ # Mock DB operations used by the processor
+ mocker.patch.object(
+ MassTransferVolume.objects,
+ "filter",
+ return_value=mocker.MagicMock(delete=mocker.MagicMock()),
+ )
+ mocker.patch.object(
+ MassTransferVolume.objects,
+ "bulk_create",
+ side_effect=lambda objs: objs,
+ )
+ mocker.patch.object(MassTransferVolume, "save")
+
+ return processor, dest_operator
+
+
+def test_process_reraises_retriable_dicom_error(mocker: MockerFixture, tmp_path: Path):
+ processor = _make_process_env(mocker, tmp_path)
+ series = [_make_discovered(series_uid="s-1")]
+
+ mocker.patch.object(processor, "_discover_series", return_value=series)
+ mocker.patch.object(
+ processor,
+ "_export_series",
+ side_effect=RetriableDicomError("PACS connection lost"),
+ )
+
+ with pytest.raises(RetriableDicomError, match="PACS connection lost"):
+ processor.process()
+
+
+def test_process_returns_warning_on_partial_failure(mocker: MockerFixture, tmp_path: Path):
+ processor = _make_process_env(mocker, tmp_path)
+ series = [
+ _make_discovered(series_uid="s-1"),
+ _make_discovered(series_uid="s-2"),
+ ]
+
+ mocker.patch.object(processor, "_discover_series", return_value=series)
+
+ call_count = {"n": 0}
+
+ def fake_export(*args, **kwargs):
+ call_count["n"] += 1
+ if call_count["n"] == 2:
+ raise DicomError("Export failed")
+ return (1, "", "")
+
+ mocker.patch.object(processor, "_export_series", side_effect=fake_export)
+
+ result = processor.process()
+
+ assert result["status"] == MassTransferTask.Status.WARNING
+ assert "Processed: 1" in result["log"]
+ assert "Failed: 1" in result["log"]
+
+
+def test_process_returns_failure_when_all_fail(mocker: MockerFixture, tmp_path: Path):
+ processor = _make_process_env(mocker, tmp_path)
+ series = [
+ _make_discovered(series_uid="s-1"),
+ _make_discovered(series_uid="s-2"),
+ ]
+
+ mocker.patch.object(processor, "_discover_series", return_value=series)
+ mocker.patch.object(processor, "_export_series", side_effect=DicomError("PACS down"))
+
+ result = processor.process()
+
+ assert result["status"] == MassTransferTask.Status.FAILURE
+ assert "Failed: 2" in result["log"]
+
+
+def test_process_returns_warning_when_suspended(mocker: MockerFixture, tmp_path: Path):
+ processor = _make_process_env(mocker, tmp_path)
+ mocker.patch.object(processor, "is_suspended", return_value=True)
+
+ result = processor.process()
+
+ assert result["status"] == MassTransferTask.Status.WARNING
+ assert "suspended" in result["log"].lower()
+
+
+def test_process_raises_when_source_not_server(mocker: MockerFixture, tmp_path: Path):
+ processor = _make_process_env(mocker, tmp_path)
+ processor.mass_task.source.node_type = DicomNode.NodeType.FOLDER
+
+ with pytest.raises(DicomError, match="source must be a DICOM server"):
+ processor.process()
+
+
+def test_process_returns_failure_when_no_filters(mocker: MockerFixture, tmp_path: Path):
+ processor = _make_process_env(mocker, tmp_path)
+ processor.mass_task.job.filters_json = []
+ processor.mass_task.job.get_filters.return_value = []
+
+ result = processor.process()
+
+ assert result["status"] == MassTransferTask.Status.FAILURE
+ assert "filter" in result["log"].lower()
+
+
+def test_process_returns_success_for_empty_partition(mocker: MockerFixture, tmp_path: Path):
+ processor = _make_process_env(mocker, tmp_path)
+ mocker.patch.object(processor, "_discover_series", return_value=[])
+
+ result = processor.process()
+
+ assert result["status"] == MassTransferTask.Status.SUCCESS
+ assert "No series found" in result["message"]
+
+
+def test_process_cleans_partition_on_retry(mocker: MockerFixture, tmp_path: Path):
+ """On retry, ALL pre-existing volumes for the partition are deleted and rediscovered."""
+ processor = _make_process_env(mocker, tmp_path)
+ series = [
+ _make_discovered(series_uid="s-1"),
+ _make_discovered(series_uid="s-2"),
+ ]
+
+ mocker.patch.object(processor, "_discover_series", return_value=series)
+
+ # Track the delete call on the volume queryset
+ mock_filter_qs = mocker.MagicMock()
+ mocker.patch.object(MassTransferVolume.objects, "filter", return_value=mock_filter_qs)
+
+ export_calls = []
+
+ def fake_export(*args, **kwargs):
+ export_calls.append(1)
+ return (1, "", "")
+
+ mocker.patch.object(processor, "_export_series", side_effect=fake_export)
+
+ result = processor.process()
+
+ # All pre-existing volumes for the partition were deleted
+ mock_filter_qs.delete.assert_called_once()
+ # Both series were exported fresh (no skipping)
+ assert len(export_calls) == 2
+ assert result["status"] == MassTransferTask.Status.SUCCESS
+
+
+# ---------------------------------------------------------------------------
+# Server destination tests
+# ---------------------------------------------------------------------------
+
+
+def test_process_server_destination_exports_and_uploads(mocker: MockerFixture):
+ processor, mock_dest_operator = _make_process_env_server_dest(mocker)
+ series = [_make_discovered(series_uid="s-1")]
+
+ mocker.patch.object(processor, "_discover_series", return_value=series)
+
+ def fake_export(op, s, path, subject_id, pseudonymizer):
+ path.mkdir(parents=True, exist_ok=True)
+ (path / "dummy.dcm").write_bytes(b"fake")
+ return (1, "pseudo-study-uid", "pseudo-series-uid")
+
+ mocker.patch.object(processor, "_export_series", side_effect=fake_export)
+
+ result = processor.process()
+
+ mock_dest_operator.upload_images.assert_called()
+ assert result["status"] == MassTransferTask.Status.SUCCESS
+
+
+def test_process_server_destination_cleans_volumes_on_retry(mocker: MockerFixture):
+ """Server destination should still delete old DB volume records on retry."""
+ processor, _ = _make_process_env_server_dest(mocker)
+ series = [_make_discovered(series_uid="s-1")]
+
+ mocker.patch.object(processor, "_discover_series", return_value=series)
+
+ mock_filter_qs = mocker.MagicMock()
+ mocker.patch.object(MassTransferVolume.objects, "filter", return_value=mock_filter_qs)
+
+ mocker.patch.object(processor, "_export_series", side_effect=_fake_export_success)
+
+ processor.process()
+
+ mock_filter_qs.delete.assert_called_once()
+
+
+def test_process_server_destination_closes_dest_operator(mocker: MockerFixture):
+ """dest_operator.close() should be called even if transfer fails."""
+ processor, mock_dest_operator = _make_process_env_server_dest(mocker)
+ series = [_make_discovered(series_uid="s-1")]
+
+ mocker.patch.object(processor, "_discover_series", return_value=series)
+ mocker.patch.object(processor, "_export_series", side_effect=DicomError("PACS down"))
+
+ processor.process()
+
+ mock_dest_operator.close.assert_called()
+
+
+def test_export_series_to_server_skips_upload_on_zero_images(mocker: MockerFixture):
+ """When _export_series returns 0 images, upload_images must NOT be called."""
+ processor = _make_processor(mocker)
+ volume = MassTransferVolume(
+ series_instance_uid="s-1",
+ study_instance_uid="study-1",
+ patient_id="PAT1",
+ number_of_images=10,
+ study_datetime=timezone.now(),
+ )
+ mock_operator = mocker.MagicMock()
+ mock_dest_operator = mocker.MagicMock()
+
+ mocker.patch.object(processor, "_export_series", return_value=(0, "", ""))
+
+ processor._export_series_to_server(mock_operator, volume, None, "subject-1", mock_dest_operator)
+
+ mock_dest_operator.upload_images.assert_not_called()
+ assert volume.status == MassTransferVolume.Status.ERROR
+
+
+def test_export_series_to_server_skips_non_image_series(mocker: MockerFixture):
+ """Non-image series (0 instances in PACS) gets SKIPPED, not ERROR."""
+ processor = _make_processor(mocker)
+ volume = MassTransferVolume(
+ series_instance_uid="s-1",
+ study_instance_uid="study-1",
+ patient_id="PAT1",
+ number_of_images=0,
+ study_datetime=timezone.now(),
+ )
+ mock_operator = mocker.MagicMock()
+ mock_dest_operator = mocker.MagicMock()
+
+ mocker.patch.object(processor, "_export_series", return_value=(0, "", ""))
+
+ processor._export_series_to_server(mock_operator, volume, None, "subject-1", mock_dest_operator)
+
+ mock_dest_operator.upload_images.assert_not_called()
+ assert volume.status == MassTransferVolume.Status.SKIPPED
+
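+# Distinction assumed by the two tests above: volume.number_of_images is what
+# the PACS reported at discovery time, so a zero-image export is only an error
+# when images were expected. Roughly:
+#
+#     if exported_images == 0:
+#         volume.status = (MassTransferVolume.Status.SKIPPED
+#                          if volume.number_of_images == 0
+#                          else MassTransferVolume.Status.ERROR)
+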
+
+def test_server_destination_upload_dicom_error_marks_failure(mocker: MockerFixture):
+ """When upload_images raises DicomError, the series should be marked as failed."""
+ processor, mock_dest_operator = _make_process_env_server_dest(mocker)
+ series = [_make_discovered(series_uid="s-1")]
+
+ mocker.patch.object(processor, "_discover_series", return_value=series)
+
+ def fake_export(op, s, path, subject_id, pseudonymizer):
+ return (1, "pseudo-study-uid", "pseudo-series-uid")
+
+ mocker.patch.object(processor, "_export_series", side_effect=fake_export)
+ mock_dest_operator.upload_images.side_effect = DicomError("C-STORE rejected")
+
+ result = processor.process()
+
+ assert result["status"] == MassTransferTask.Status.FAILURE
+
+
+def test_server_destination_upload_retriable_error_propagates(mocker: MockerFixture):
+ """When upload_images raises RetriableDicomError, it must propagate up."""
+ processor, mock_dest_operator = _make_process_env_server_dest(mocker)
+ series = [_make_discovered(series_uid="s-1")]
+
+ mocker.patch.object(processor, "_discover_series", return_value=series)
+
+ def fake_export(op, s, path, subject_id, pseudonymizer):
+ return (1, "pseudo-study-uid", "pseudo-series-uid")
+
+ mocker.patch.object(processor, "_export_series", side_effect=fake_export)
+ mock_dest_operator.upload_images.side_effect = RetriableDicomError("Connection reset")
+
+ with pytest.raises(RetriableDicomError, match="Connection reset"):
+ processor.process()
+
+
+def test_process_none_mode_uses_patient_id_as_subject(mocker: MockerFixture, tmp_path: Path):
+ """When pseudonymize=False, no pseudonymizer is used."""
+ processor = _make_process_env(mocker, tmp_path, pseudonymize=False, pseudonym_salt="")
+ series = [_make_discovered(patient_id="REAL-PAT-1", series_uid="s-1")]
+
+ mocker.patch.object(processor, "_discover_series", return_value=series)
+
+ export_calls: list[tuple] = []
+
+ def fake_export(op, s, path, subject_id, pseudonymizer):
+ export_calls.append((subject_id, pseudonymizer))
+ return (1, "", "")
+
+ mocker.patch.object(processor, "_export_series", side_effect=fake_export)
+
+ result = processor.process()
+
+ assert len(export_calls) == 1
+ subject_id, pseudonymizer = export_calls[0]
+ assert subject_id == "REAL-PAT-1"
+ assert pseudonymizer is None
+ assert result["status"] == MassTransferTask.Status.SUCCESS
+
+
+def test_process_pseudonymize_mode_same_study_same_pseudonym(mocker: MockerFixture, tmp_path: Path):
+ """In non-linking mode, series in the same study share a pseudonym."""
+ processor = _make_process_env(mocker, tmp_path, pseudonym_salt="")
+ series = [
+ _make_discovered(patient_id="PAT1", study_uid="study-A", series_uid="s-1"),
+ _make_discovered(patient_id="PAT1", study_uid="study-A", series_uid="s-2"),
+ ]
+
+ mocker.patch.object(processor, "_discover_series", return_value=series)
+
+ subject_ids: list[str] = []
+
+ def fake_export(op, s, path, subject_id, pseudonymizer):
+ subject_ids.append(subject_id)
+ return (1, "", "")
+
+ mocker.patch.object(processor, "_export_series", side_effect=fake_export)
+
+ processor.process()
+
+ # Same study → same pseudonym
+ assert subject_ids[0] == subject_ids[1]
+ assert subject_ids[0] != ""
+ assert subject_ids[0] != "PAT1"
+
+
+def test_process_pseudonymize_mode_different_studies_different_pseudonyms(
+ mocker: MockerFixture, tmp_path: Path
+):
+ """In non-linking mode, different studies for the same patient get different pseudonyms."""
+ processor = _make_process_env(mocker, tmp_path, pseudonym_salt="")
+ series = [
+ _make_discovered(patient_id="PAT1", study_uid="study-A", series_uid="s-1"),
+ _make_discovered(patient_id="PAT1", study_uid="study-B", series_uid="s-2"),
+ ]
+
+ mocker.patch.object(processor, "_discover_series", return_value=series)
+
+ subject_ids: list[str] = []
+
+ def fake_export(op, s, path, subject_id, pseudonymizer):
+ subject_ids.append(subject_id)
+ return (1, "", "")
+
+ mocker.patch.object(processor, "_export_series", side_effect=fake_export)
+
+ processor.process()
+
+ # Different studies → different pseudonyms (non-linkable)
+ assert subject_ids[0] != subject_ids[1]
+ assert subject_ids[0] != ""
+ assert subject_ids[0] != "PAT1"
+
+
+def test_process_linking_mode_uses_deterministic_pseudonym(mocker: MockerFixture, tmp_path: Path):
+ """In linking mode (pseudonymize with non-empty salt), pseudonyms are deterministic."""
+ processor = _make_process_env(
+ mocker, tmp_path, pseudonym_salt="test-salt-for-deterministic-pseudonyms"
+ )
+ series = [
+ _make_discovered(patient_id="PAT1", study_uid="study-A", series_uid="s-1"),
+ ]
+
+ mocker.patch.object(processor, "_discover_series", return_value=series)
+
+ subject_ids: list[str] = []
+
+ def fake_export(op, s, path, subject_id, pseudonymizer):
+ subject_ids.append(subject_id)
+ return (1, "", "")
+
+ mocker.patch.object(processor, "_export_series", side_effect=fake_export)
+
+ processor.process()
+
+ assert subject_ids[0] != ""
+ assert subject_ids[0] != "PAT1"
+ # Pseudonym should be deterministic — running again with same salt gives same result
+ from adit.core.utils.pseudonymizer import compute_pseudonym
+ from adit.mass_transfer.processors import _DETERMINISTIC_PSEUDONYM_LENGTH
+
+ expected = compute_pseudonym(
+ "test-salt-for-deterministic-pseudonyms",
+ "PAT1",
+ length=_DETERMINISTIC_PSEUDONYM_LENGTH,
+ )
+ assert subject_ids[0] == expected
+
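+# Pseudonym mode matrix the tests above pin down (derived from the assertions,
+# not from reading the implementation):
+#
+#   pseudonymize=False              -> real PatientID as subject, no pseudonymizer
+#   pseudonymize=True, salt == ""   -> random per-study pseudonym (not linkable)
+#   pseudonymize=True, salt != ""   -> deterministic compute_pseudonym (linkable)
+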
+
+# ---------------------------------------------------------------------------
+# _convert_series tests
+# ---------------------------------------------------------------------------
+
+
+def test_convert_series_raises_on_dcm2niix_failure(mocker: MockerFixture, tmp_path: Path):
+ processor = _make_processor(mocker)
+ volume = MassTransferVolume(series_instance_uid="1.2.3", study_datetime=timezone.now())
+
+ dicom_dir = tmp_path / "dicom_input"
+ dicom_dir.mkdir()
+ output_path = tmp_path / "output"
+
+ mocker.patch(
+ "adit.core.utils.dicom_to_nifti_converter.DicomToNiftiConverter.convert",
+ side_effect=RuntimeError("conversion failed"),
+ )
+
+ with pytest.raises(DicomError, match="Conversion failed"):
+ processor._convert_series(volume, dicom_dir, output_path)
+
+
+def test_convert_series_raises_when_no_nifti_output(mocker: MockerFixture, tmp_path: Path):
+ processor = _make_processor(mocker)
+ volume = MassTransferVolume(series_instance_uid="1.2.3", study_datetime=timezone.now())
+
+ dicom_dir = tmp_path / "dicom_input"
+ dicom_dir.mkdir()
+ output_path = tmp_path / "output"
+
+ # Converter succeeds (does nothing), but output dir has no .nii.gz files
+ mocker.patch(
+ "adit.core.utils.dicom_to_nifti_converter.DicomToNiftiConverter.convert",
+ )
+
+ with pytest.raises(DicomError, match="no .nii.gz files"):
+ processor._convert_series(volume, dicom_dir, output_path)
+
+
+def test_convert_series_skips_non_image_dicom(mocker: MockerFixture, tmp_path: Path):
+ processor = _make_processor(mocker)
+ volume = MassTransferVolume(series_instance_uid="1.2.3", study_datetime=timezone.now())
+
+ dicom_dir = tmp_path / "dicom_input"
+ dicom_dir.mkdir()
+ output_path = tmp_path / "output"
+
+ mocker.patch(
+ "adit.core.utils.dicom_to_nifti_converter.DicomToNiftiConverter.convert",
+ side_effect=RuntimeError("No valid DICOM images were found"),
+ )
+
+ # Should not raise — non-image DICOMs are silently skipped
+ processor._convert_series(volume, dicom_dir, output_path)
+
+
+# ---------------------------------------------------------------------------
+# Utility function tests
+# ---------------------------------------------------------------------------
+
+
+def test_series_folder_name_with_number_and_description():
+ assert _series_folder_name("Head CT", 1, "1.2.3") == "Head CT_1"
+
+
+def test_series_folder_name_with_no_description():
+ assert _series_folder_name("", 1, "1.2.3") == "Undefined_1"
+
+
+def test_series_folder_name_with_no_number():
+ assert _series_folder_name("Head CT", None, "1.2.3.4.5") == "Head CT_1.2.3.4.5"
+
+
+def test_study_folder_name_includes_description_and_date():
+ name = _study_folder_name("Brain CT", datetime(2024, 1, 15, 10, 30))
+ assert name == "Brain CT_20240115_103000"
+
+
+def test_parse_int_normal():
+ assert _parse_int("42") == 42
+
+
+def test_parse_int_none_returns_default():
+ assert _parse_int(None, default=7) == 7
+
+
+def test_parse_int_empty_returns_default():
+ assert _parse_int("", default=0) == 0
+
+
+def test_parse_int_garbage_returns_default():
+ assert _parse_int("abc", default=None) is None
+
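+# _parse_int semantics implied by the four tests above (a sketch consistent
+# with every assertion, not necessarily the actual implementation):
+#
+#     def _parse_int(value, default=None):
+#         try:
+#             return int(value)
+#         except (TypeError, ValueError):
+#             return default
+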
+
+def test_study_datetime_with_time():
+ ds = Dataset()
+ ds.StudyDate = "20240115"
+ ds.StudyTime = "103000"
+ result = _study_datetime(ResultDataset(ds))
+ assert result == datetime(2024, 1, 15, 10, 30, 0)
+
+
+def test_study_datetime_with_midnight():
+ ds = Dataset()
+ ds.StudyDate = "20240115"
+ ds.StudyTime = "000000"
+ result = _study_datetime(ResultDataset(ds))
+ assert result == datetime(2024, 1, 15, 0, 0, 0)
+
+
+def test_dicom_match_empty_pattern_matches_anything():
+ assert _dicom_match("", "anything") is True
+ assert _dicom_match("", None) is True
+ assert _dicom_match("", "") is True
+
+
+def test_dicom_match_none_value_never_matches():
+ assert _dicom_match("CT", None) is False
+
+
+def test_dicom_match_exact():
+ assert _dicom_match("CT", "CT") is True
+ assert _dicom_match("CT", "MR") is False
+
+
+def test_dicom_match_wildcard():
+ assert _dicom_match("Head*", "Head CT") is True
+ assert _dicom_match("Head*", "Foot CT") is False
+
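+# _dicom_match behaves like shell-style wildcard matching on DICOM values; a
+# sketch consistent with every assertion above (assumption, not the real code):
+#
+#     import fnmatch
+#
+#     def _dicom_match(pattern: str, value: str | None) -> bool:
+#         if not pattern:
+#             return True  # empty pattern matches anything, even a missing value
+#         if value is None:
+#             return False
+#         return fnmatch.fnmatchcase(value, pattern)
+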
+
+# ---------------------------------------------------------------------------
+# DB integration tests
+# ---------------------------------------------------------------------------
+
+
+@pytest.mark.django_db
+def test_process_creates_volume_records_on_success(mocker: MockerFixture, mass_transfer_env):
+ """Volumes are created in PENDING then updated to EXPORTED after successful export."""
+ env = mass_transfer_env
+ series = [_make_discovered(patient_id="PAT1", series_uid="1.2.3.4.5")]
+
+ processor = MassTransferTaskProcessor(env.task)
+ mocker.patch.object(processor, "_discover_series", return_value=series)
+ mocker.patch("adit.mass_transfer.processors.DicomOperator")
+ mocker.patch.object(processor, "_export_series", side_effect=_fake_export_success)
+
+ assert MassTransferVolume.objects.filter(job=env.job).count() == 0
+
+ result = processor.process()
+
+ assert result["status"] == MassTransferTask.Status.SUCCESS
+ vol = MassTransferVolume.objects.get(job=env.job, series_instance_uid="1.2.3.4.5")
+ assert vol.status == MassTransferVolume.Status.EXPORTED
+ assert vol.patient_id == "PAT1"
+ assert vol.task == env.task
+
+
+@pytest.mark.django_db
+def test_process_creates_error_volume_on_failure(mocker: MockerFixture, mass_transfer_env):
+ """Failed exports still create a volume record with ERROR status."""
+ env = mass_transfer_env
+ series = [_make_discovered(patient_id="PAT1", series_uid="1.2.3.4.5")]
+
+ processor = MassTransferTaskProcessor(env.task)
+ mocker.patch.object(processor, "_discover_series", return_value=series)
+ mocker.patch("adit.mass_transfer.processors.DicomOperator")
+ mocker.patch.object(processor, "_export_series", side_effect=DicomError("Export failed"))
+
+ result = processor.process()
+
+ assert result["status"] == MassTransferTask.Status.FAILURE
+ vol = MassTransferVolume.objects.get(job=env.job, series_instance_uid="1.2.3.4.5")
+ assert vol.status == MassTransferVolume.Status.ERROR
+ assert "Export failed" in vol.log
+
+
+@pytest.mark.django_db
+def test_process_deletes_all_volumes_on_retry(mocker: MockerFixture, mass_transfer_env):
+ """On retry, ALL volumes from prior runs are deleted before rediscovery."""
+ env = mass_transfer_env
+ job, task = env.job, env.task
+
+ # Simulate a prior failed run that left an ERROR volume
+ MassTransferVolume.objects.create(
+ job=job,
+ task=task,
+ partition_key="20240101",
+ patient_id="PAT1",
+ study_instance_uid="study-1",
+ series_instance_uid="1.2.3.4.5",
+ modality="CT",
+ study_description="Brain CT",
+ series_description="Axial",
+ series_number=1,
+ study_datetime=timezone.now(),
+ status=MassTransferVolume.Status.ERROR,
+ log="Previous failure",
+ )
+
+ series = [_make_discovered(patient_id="PAT1", series_uid="1.2.3.4.5")]
+
+ processor = MassTransferTaskProcessor(task)
+ mocker.patch.object(processor, "_discover_series", return_value=series)
+ mocker.patch("adit.mass_transfer.processors.DicomOperator")
+ mocker.patch.object(processor, "_export_series", side_effect=_fake_export_success)
+
+ result = processor.process()
+
+ assert result["status"] == MassTransferTask.Status.SUCCESS
+ # Old ERROR volume deleted, new EXPORTED volume created
+ vols = MassTransferVolume.objects.filter(job=job, series_instance_uid="1.2.3.4.5")
+ assert vols.count() == 1
+ vol = vols.first()
+ assert vol is not None
+ assert vol.status == MassTransferVolume.Status.EXPORTED
+
+
+@pytest.mark.django_db
+def test_process_deterministic_pseudonyms_across_partitions(mocker: MockerFixture, tmp_path: Path):
+ """Same patient gets the same pseudonym across different partitions (linking mode)."""
+ MassTransferSettings.objects.create()
+
+ user = UserFactory.create()
+ source = DicomServerFactory.create()
+ destination = DicomFolderFactory.create(path=str(tmp_path / "output"))
+ job = MassTransferJob.objects.create(
+ owner=user,
+ start_date=date(2024, 1, 1),
+ end_date=date(2024, 1, 2),
+ partition_granularity=MassTransferJob.PartitionGranularity.DAILY,
+ pseudonym_salt="test-salt",
+ )
+ job.filters_json = [{"modality": "CT"}]
+ job.save(update_fields=["filters_json"])
+
+ task1 = MassTransferTask.objects.create(
+ job=job,
+ source=source,
+ destination=destination,
+ patient_id="",
+ study_uid="",
+ partition_start=timezone.make_aware(datetime(2024, 1, 1)),
+ partition_end=timezone.make_aware(datetime(2024, 1, 1, 23, 59, 59)),
+ partition_key="20240101",
+ )
+ task2 = MassTransferTask.objects.create(
+ job=job,
+ source=source,
+ destination=destination,
+ patient_id="",
+ study_uid="",
+ partition_start=timezone.make_aware(datetime(2024, 1, 2)),
+ partition_end=timezone.make_aware(datetime(2024, 1, 2, 23, 59, 59)),
+ partition_key="20240102",
+ )
+
+ mocker.patch("adit.mass_transfer.processors.DicomOperator")
+
+ # Partition 1: PAT1
+ series1 = [
+ _make_discovered(
+ patient_id="PAT1",
+ study_uid="1.2.3.100",
+ series_uid="1.2.3.100.1",
+ )
+ ]
+ processor1 = MassTransferTaskProcessor(task1)
+ mocker.patch.object(processor1, "_discover_series", return_value=series1)
+ mocker.patch.object(processor1, "_export_series", side_effect=_fake_export_success)
+ processor1.process()
+
+ # Partition 2: same PAT1
+ series2 = [
+ _make_discovered(
+ patient_id="PAT1",
+ study_uid="1.2.3.200",
+ series_uid="1.2.3.200.1",
+ )
+ ]
+ processor2 = MassTransferTaskProcessor(task2)
+ mocker.patch.object(processor2, "_discover_series", return_value=series2)
+ mocker.patch.object(processor2, "_export_series", side_effect=_fake_export_success)
+ processor2.process()
+
+ vol1 = MassTransferVolume.objects.get(series_instance_uid="1.2.3.100.1")
+ vol2 = MassTransferVolume.objects.get(series_instance_uid="1.2.3.200.1")
+
+ # Linking mode: same patient → same pseudonym across partitions
+ assert vol1.pseudonym == vol2.pseudonym
+ assert vol1.pseudonym != ""
+ assert vol1.pseudonym != "PAT1"
+
+
+@pytest.mark.django_db
+def test_process_pseudonymize_mode_not_linked_across_partitions(
+ mocker: MockerFixture, tmp_path: Path
+):
+ """Non-linking pseudonymize mode: same patient gets different pseudonyms across partitions."""
+ MassTransferSettings.objects.create()
+
+ user = UserFactory.create()
+ source = DicomServerFactory.create()
+ destination = DicomFolderFactory.create(path=str(tmp_path / "output"))
+ job = MassTransferJob.objects.create(
+ owner=user,
+ start_date=date(2024, 1, 1),
+ end_date=date(2024, 1, 2),
+ partition_granularity=MassTransferJob.PartitionGranularity.DAILY,
+ pseudonymize=True,
+ pseudonym_salt="",
+ )
+ job.filters_json = [{"modality": "CT"}]
+ job.save(update_fields=["filters_json"])
+
+ task1 = MassTransferTask.objects.create(
+ job=job,
+ source=source,
+ destination=destination,
+ patient_id="",
+ study_uid="",
+ partition_start=timezone.make_aware(datetime(2024, 1, 1)),
+ partition_end=timezone.make_aware(datetime(2024, 1, 1, 23, 59, 59)),
+ partition_key="20240101",
+ )
+ task2 = MassTransferTask.objects.create(
+ job=job,
+ source=source,
+ destination=destination,
+ patient_id="",
+ study_uid="",
+ partition_start=timezone.make_aware(datetime(2024, 1, 2)),
+ partition_end=timezone.make_aware(datetime(2024, 1, 2, 23, 59, 59)),
+ partition_key="20240102",
+ )
+
+ mocker.patch("adit.mass_transfer.processors.DicomOperator")
+
+ series1 = [
+ _make_discovered(
+ patient_id="PAT1",
+ study_uid="1.2.3.100",
+ series_uid="1.2.3.100.1",
+ )
+ ]
+ processor1 = MassTransferTaskProcessor(task1)
+ mocker.patch.object(processor1, "_discover_series", return_value=series1)
+ mocker.patch.object(processor1, "_export_series", side_effect=_fake_export_success)
+ processor1.process()
+
+ series2 = [
+ _make_discovered(
+ patient_id="PAT1",
+ study_uid="1.2.3.200",
+ series_uid="1.2.3.200.1",
+ )
+ ]
+ processor2 = MassTransferTaskProcessor(task2)
+ mocker.patch.object(processor2, "_discover_series", return_value=series2)
+ mocker.patch.object(processor2, "_export_series", side_effect=_fake_export_success)
+ processor2.process()
+
+ vol1 = MassTransferVolume.objects.get(series_instance_uid="1.2.3.100.1")
+ vol2 = MassTransferVolume.objects.get(series_instance_uid="1.2.3.200.1")
+
+ # Non-linking mode: same patient should get DIFFERENT random pseudonyms
+ assert vol1.pseudonym != ""
+ assert vol2.pseudonym != ""
+ assert vol1.pseudonym != "PAT1"
+ assert vol1.pseudonym != vol2.pseudonym
+
+
+# ---------------------------------------------------------------------------
+# Age filtering tests
+# ---------------------------------------------------------------------------
+
+
+def test_age_at_study_basic():
+ assert _age_at_study(date(1990, 6, 15), date(2025, 6, 15)) == 35
+ assert _age_at_study(date(1990, 6, 15), date(2025, 6, 14)) == 34
+ assert _age_at_study(date(1990, 6, 15), date(2025, 6, 16)) == 35
+
+
+def test_age_at_study_leap_year():
+ assert _age_at_study(date(2000, 2, 29), date(2025, 2, 28)) == 24
+ assert _age_at_study(date(2000, 2, 29), date(2025, 3, 1)) == 25
+
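+# Reference semantics consistent with all five assertions above (a sketch,
+# not necessarily the actual implementation):
+#
+#     def _age_at_study(birth: date, study: date) -> int:
+#         age = study.year - birth.year
+#         if (study.month, study.day) < (birth.month, birth.day):
+#             age -= 1
+#         return age
+#
+# Under this rule a Feb 29 birthday only counts from Mar 1 in non-leap years,
+# which is exactly what the leap-year test pins down.
+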
+
+def test_birth_date_range_no_age_limits():
+ assert _birth_date_range(date(2025, 1, 1), date(2025, 1, 31), None, None) is None
+
+
+def test_birth_date_range_min_only():
+ result = _birth_date_range(date(2025, 3, 15), date(2025, 3, 15), 18, None)
+ assert result is not None
+ earliest, latest = result
+    # Latest birth: someone who is exactly 18 on the 2025-03-15 study date was
+    # born on 2007-03-15 (2025 - 18 = 2007), so the latest year must be >= 2007
+ assert latest.year >= 2007
+ assert earliest == date(1900, 1, 1)
+
+
+def test_birth_date_range_max_only():
+ result = _birth_date_range(date(2025, 3, 15), date(2025, 3, 15), None, 65)
+ assert result is not None
+ earliest, latest = result
+ # Earliest birth: someone who is 65 on study date was born ~1959
+ assert earliest.year <= 1960
+
+
+def test_birth_date_range_both():
+ result = _birth_date_range(date(2025, 3, 15), date(2025, 3, 15), 18, 65)
+ assert result is not None
+ earliest, latest = result
+ assert earliest < latest
+
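+# Assumed mapping from age limits to a PatientBirthDate query range (sketch):
+# min_age bounds the LATEST admissible birth date and max_age the EARLIEST,
+# with the open ends clamped (the min-only test shows earliest falling back
+# to 1900-01-01). Roughly:
+#
+#     latest = study_end - min_age years                    # youngest patient
+#     earliest = study_start - (max_age + 1) years + 1 day  # oldest patient
+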
+
+# ---------------------------------------------------------------------------
+# FilterSpec tests
+# ---------------------------------------------------------------------------
+
+
+def test_filter_spec_from_dict():
+ d = {
+ "modality": "MR",
+ "institution_name": "Neuroradiologie",
+ "min_age": 18,
+ "max_age": 90,
+ }
+ fs = FilterSpec.from_dict(d)
+ assert fs.modality == "MR"
+ assert fs.institution_name == "Neuroradiologie"
+ assert fs.min_age == 18
+ assert fs.max_age == 90
+ assert fs.study_description == ""
+ assert fs.apply_institution_on_study is True
+
+
+def test_filter_spec_from_dict_with_min_instances():
+ d = {"modality": "CT", "min_number_of_series_related_instances": 5}
+ fs = FilterSpec.from_dict(d)
+ assert fs.min_number_of_series_related_instances == 5
+
+
+def test_filter_spec_from_dict_without_min_instances():
+ d = {"modality": "CT"}
+ fs = FilterSpec.from_dict(d)
+ assert fs.min_number_of_series_related_instances is None
+
+
+# ---------------------------------------------------------------------------
+# DICOM metadata tests
+# ---------------------------------------------------------------------------
+
+
+def test_write_dicom_metadata(tmp_path: Path):
+    import json
+
+    from adit.mass_transfer.processors import _write_dicom_metadata
+
+ fields = {
+ "PatientBirthDate": "19900101",
+ "PatientSex": "M",
+ "PatientAgeAtStudy": "35",
+ "StudyDate": "20250315",
+ "StudyInstanceUID": "1.2.3.4.5",
+ "SeriesInstanceUID": "1.2.3.4.5.6",
+ "Modality": "MR",
+ }
+
+ _write_dicom_metadata(tmp_path, "T1w_3D_101", fields)
+
+ metadata = tmp_path / "T1w_3D_101_dicom.json"
+ assert metadata.exists()
+ result = json.loads(metadata.read_text())
+ assert result["PatientBirthDate"] == "19900101"
+ assert result["PatientAgeAtStudy"] == "35"
+ assert result["StudyInstanceUID"] == "1.2.3.4.5"
+ assert result["Modality"] == "MR"
+
+
+def test_write_dicom_metadata_empty_fields(tmp_path: Path):
+ from adit.mass_transfer.processors import _write_dicom_metadata
+
+ _write_dicom_metadata(tmp_path, "series_1", {})
+
+ # No file should be written when fields are empty
+ assert not list(tmp_path.glob("*.json"))
+
+
+def _write_test_dicom(path: Path, **kwargs) -> None:
+ """Write a minimal valid DICOM file for testing."""
+ import pydicom
+
+ ds = pydicom.Dataset()
+ for k, v in kwargs.items():
+ setattr(ds, k, v)
+ ds.SOPClassUID = kwargs.get("SOPClassUID", "1.2.840.10008.5.1.4.1.1.4")
+ ds.SOPInstanceUID = kwargs.get("SOPInstanceUID", "1.2.3.4.5")
+ from pydicom.dataset import FileMetaDataset
+ from pydicom.uid import ExplicitVRLittleEndian
+
+ file_meta = FileMetaDataset()
+ file_meta.TransferSyntaxUID = ExplicitVRLittleEndian
+ file_meta.MediaStorageSOPClassUID = ds.SOPClassUID
+ file_meta.MediaStorageSOPInstanceUID = ds.SOPInstanceUID
+ ds.file_meta = file_meta
+ pydicom.dcmwrite(str(path), ds, enforce_file_format=True)
+
+
+def test_extract_dicom_metadata_computes_age(tmp_path: Path):
+ """_extract_dicom_metadata should compute PatientAgeAtStudy from birth date and study date."""
+ from adit.mass_transfer.processors import _extract_dicom_metadata
+
+ _write_test_dicom(
+ tmp_path / "test.dcm",
+ PatientBirthDate="19900615",
+ PatientSex="M",
+ StudyDate="20250615",
+ StudyInstanceUID="1.2.3",
+ SeriesInstanceUID="1.2.3.4",
+ Modality="MR",
+ )
+
+ result = _extract_dicom_metadata(tmp_path)
+ assert result["PatientAgeAtStudy"] == "35"
+ assert result["PatientBirthDate"] == "19900615"
+ assert result["PatientSex"] == "M"
+ assert result["StudyInstanceUID"] == "1.2.3"
+
+
+def test_extract_dicom_metadata_pseudonymized_has_no_real_data(tmp_path: Path):
+ """When pseudonymization is applied, metadata should contain pseudonymized values,
+ not originals.
+
+ This test simulates the post-pseudonymization state: the DICOM files on disk have already
+ been anonymized by dicognito + Pseudonymizer before _extract_dicom_metadata runs.
+ We verify the metadata contains only the pseudonymized values.
+ """
+ from adit.mass_transfer.processors import _extract_dicom_metadata
+
+ _write_test_dicom(
+ tmp_path / "test.dcm",
+ PatientID="ABCDEF123456",
+ PatientName="ABCDEF123456",
+ PatientBirthDate="19920101",
+ PatientSex="M",
+ StudyDate="20260101",
+ StudyInstanceUID="2.25.999999999",
+ SeriesInstanceUID="2.25.888888888",
+ Modality="MR",
+ )
+
+ result = _extract_dicom_metadata(tmp_path)
+
+ # Metadata must contain the pseudonymized values (what's on disk)
+ assert result["PatientID"] == "ABCDEF123456"
+ assert result["PatientBirthDate"] == "19920101"
+ assert result["StudyInstanceUID"] == "2.25.999999999"
+ assert result["SeriesInstanceUID"] == "2.25.888888888"
+ assert result["StudyDate"] == "20260101"
+
+ # Real values must NOT appear anywhere
+ real_patient_id = "4654954"
+ real_birth_date = "19900615"
+ real_study_uid = "1.2.276.0.18.14.200.2.0.0.2.20250311.175028.78.91"
+ for val in result.values():
+ assert real_patient_id not in val
+ assert real_birth_date not in val
+ assert real_study_uid not in val
+
+
+# ---------------------------------------------------------------------------
+# _create_pending_volumes / _group_volumes tests
+# ---------------------------------------------------------------------------
+
+
+@pytest.mark.django_db
+def test_create_pending_volumes_deterministic_pseudonym():
+ """Seeded pseudonymizer with salt: volumes get deterministic pseudonyms."""
+ from adit.core.utils.pseudonymizer import Pseudonymizer
+
+ user = UserFactory.create()
+ source = DicomServerFactory.create()
+ destination = DicomFolderFactory.create()
+ job = MassTransferJob.objects.create(
+ owner=user,
+ start_date=date(2024, 1, 1),
+ end_date=date(2024, 1, 1),
+ partition_granularity=MassTransferJob.PartitionGranularity.DAILY,
+ pseudonym_salt="test-seed-123",
+ )
+ job.filters_json = [{"modality": "CT"}]
+ job.save(update_fields=["filters_json"])
+
+ now = timezone.now()
+ task = MassTransferTask.objects.create(
+ job=job,
+ source=source,
+ destination=destination,
+ patient_id="",
+ study_uid="",
+ partition_start=now,
+ partition_end=now + timedelta(hours=23, minutes=59, seconds=59),
+ partition_key="20240101",
+ )
+
+ from adit.core.utils.pseudonymizer import compute_pseudonym
+ from adit.mass_transfer.processors import _DETERMINISTIC_PSEUDONYM_LENGTH
+
+ ps = Pseudonymizer(seed="test-seed-123")
+ expected_pat1 = compute_pseudonym(
+ "test-seed-123", "PAT1", length=_DETERMINISTIC_PSEUDONYM_LENGTH
+ )
+ expected_pat2 = compute_pseudonym(
+ "test-seed-123", "PAT2", length=_DETERMINISTIC_PSEUDONYM_LENGTH
+ )
+
+ series = [
+ _make_discovered(patient_id="PAT1", study_uid="study-A", series_uid="s-1"),
+ _make_discovered(patient_id="PAT2", study_uid="study-B", series_uid="s-2"),
+ ]
+
+ processor = MassTransferTaskProcessor(task)
+ volumes = processor._create_pending_volumes(series, job, ps)
+
+ assert len(volumes) == 2
+ assert volumes[0].pseudonym == expected_pat1
+ assert volumes[1].pseudonym == expected_pat2
+ assert all(v.status == MassTransferVolume.Status.PENDING for v in volumes)
+ assert all(v.pk is not None for v in volumes)
+
+ grouped = MassTransferTaskProcessor._group_volumes(volumes)
+ assert "PAT1" in grouped
+ assert "PAT2" in grouped
+
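+# What the test above relies on: compute_pseudonym(salt, value, length=...) is
+# a pure function, so the same salt and patient ID always yield the same
+# pseudonym. A plausible sketch (the HMAC-SHA256 construction is an
+# assumption, not the actual implementation):
+#
+#     import hashlib, hmac
+#
+#     def compute_pseudonym(salt: str, value: str, *, length: int) -> str:
+#         digest = hmac.new(salt.encode(), value.encode(), hashlib.sha256).hexdigest()
+#         return digest[:length]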
+
+def test_create_pending_volumes_no_anonymization(mocker: MockerFixture):
+ """Without pseudonymizer, volumes have empty pseudonym."""
+ processor = _make_processor(mocker)
+ mocker.patch.object(
+ MassTransferVolume.objects,
+ "bulk_create",
+ side_effect=lambda objs: objs,
+ )
+
+ series = [
+ _make_discovered(patient_id="PAT1", study_uid="study-A", series_uid="s-1"),
+ _make_discovered(patient_id="PAT2", study_uid="study-B", series_uid="s-2"),
+ ]
+
+ mock_job = mocker.MagicMock()
+ mock_job.pseudonym_salt = ""
+
+ volumes = processor._create_pending_volumes(series, mock_job, None)
+
+ assert volumes[0].pseudonym == ""
+ assert volumes[1].pseudonym == ""
+
+
+def test_create_pending_volumes_random_assigns_per_study(mocker: MockerFixture):
+ """With pseudonymizer but no salt, volumes get per-study random pseudonyms."""
+ from adit.core.utils.pseudonymizer import Pseudonymizer
+
+ processor = _make_processor(mocker)
+ mocker.patch.object(
+ MassTransferVolume.objects,
+ "bulk_create",
+ side_effect=lambda objs: objs,
+ )
+
+ ps = Pseudonymizer()
+
+ mock_job = mocker.MagicMock()
+ mock_job.pseudonym_salt = ""
+
+ series = [
+ _make_discovered(patient_id="PAT1", study_uid="study-A", series_uid="s-1"),
+ _make_discovered(patient_id="PAT1", study_uid="study-A", series_uid="s-2"),
+ _make_discovered(patient_id="PAT1", study_uid="study-B", series_uid="s-3"),
+ ]
+
+ volumes = processor._create_pending_volumes(series, mock_job, ps)
+
+ # Same study → same pseudonym
+ assert volumes[0].pseudonym == volumes[1].pseudonym
+ assert volumes[0].pseudonym != ""
+ # Different study → different pseudonym
+ assert volumes[0].pseudonym != volumes[2].pseudonym
+ assert volumes[2].pseudonym != ""
+
+
+# ---------------------------------------------------------------------------
+# _group_volumes tests
+# ---------------------------------------------------------------------------
+
+
+def test_group_volumes_multi_patient_multi_study():
+ """Volumes are grouped by patient_id -> study_instance_uid."""
+ now = timezone.now()
+ v1 = MassTransferVolume(
+ patient_id="PAT1",
+ study_instance_uid="study-A",
+ series_instance_uid="s-1",
+ study_datetime=now,
+ )
+ v2 = MassTransferVolume(
+ patient_id="PAT1",
+ study_instance_uid="study-A",
+ series_instance_uid="s-2",
+ study_datetime=now,
+ )
+ v3 = MassTransferVolume(
+ patient_id="PAT1",
+ study_instance_uid="study-B",
+ series_instance_uid="s-3",
+ study_datetime=now,
+ )
+ v4 = MassTransferVolume(
+ patient_id="PAT2",
+ study_instance_uid="study-C",
+ series_instance_uid="s-4",
+ study_datetime=now,
+ )
+
+ grouped = MassTransferTaskProcessor._group_volumes([v1, v2, v3, v4])
+
+ assert set(grouped.keys()) == {"PAT1", "PAT2"}
+ assert set(grouped["PAT1"].keys()) == {"study-A", "study-B"}
+ assert grouped["PAT1"]["study-A"] == [v1, v2]
+ assert grouped["PAT1"]["study-B"] == [v3]
+ assert grouped["PAT2"]["study-C"] == [v4]
+
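+# The asserts above fix _group_volumes as a two-level grouping that preserves
+# input order; a minimal sketch under that assumption:
+#
+#     @staticmethod
+#     def _group_volumes(volumes):
+#         grouped: dict[str, dict[str, list]] = {}
+#         for v in volumes:
+#             grouped.setdefault(v.patient_id, {}).setdefault(
+#                 v.study_instance_uid, []
+#             ).append(v)
+#         return grouped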
+
+# ---------------------------------------------------------------------------
+# RetriableDicomError volume status tests
+# ---------------------------------------------------------------------------
+
+
+def test_retriable_error_saves_volume_as_error(mocker: MockerFixture, tmp_path: Path):
+ """RetriableDicomError should save the current volume as ERROR before propagating."""
+ processor = _make_process_env(mocker, tmp_path)
+ series = [_make_discovered(series_uid="s-1")]
+
+ mocker.patch.object(processor, "_discover_series", return_value=series)
+ mocker.patch.object(
+ processor,
+ "_export_series",
+ side_effect=RetriableDicomError("PACS connection lost"),
+ )
+
+ with pytest.raises(RetriableDicomError):
+ processor.process()
+
+ # volume.save() should have been called (via the finally block)
+ MassTransferVolume.save.assert_called()
+
+
+# ---------------------------------------------------------------------------
+# Partition cleanup DB integration test
+# ---------------------------------------------------------------------------
+
+
+@pytest.mark.django_db
+def test_partition_cleanup_deletes_folder_and_volumes(mocker: MockerFixture, mass_transfer_env):
+ """process() deletes the partition folder on disk and all volumes for that partition."""
+ env = mass_transfer_env
+ job, task, destination = env.job, env.task, env.destination
+
+ # Create pre-existing volumes
+ for uid in ["1.2.3.1", "1.2.3.2"]:
+ MassTransferVolume.objects.create(
+ job=job,
+ task=task,
+ partition_key="20240101",
+ patient_id="PAT1",
+ study_instance_uid="study-1",
+ series_instance_uid=uid,
+ modality="CT",
+ study_description="Brain CT",
+ series_description="Axial",
+ series_number=1,
+ study_datetime=timezone.now(),
+ status=MassTransferVolume.Status.EXPORTED,
+ log="",
+ )
+
+ # Create the partition folder with a file in it
+ partition_dir = _destination_base_dir(destination, job) / "20240101"
+ partition_dir.mkdir(parents=True, exist_ok=True)
+ (partition_dir / "some_file.dcm").write_text("dummy")
+
+ assert MassTransferVolume.objects.filter(job=job, partition_key="20240101").count() == 2
+
+ # Mock discovery to return a new series
+ series = [_make_discovered(patient_id="PAT1", series_uid="1.2.3.new")]
+ processor = MassTransferTaskProcessor(task)
+ mocker.patch.object(processor, "_discover_series", return_value=series)
+ mocker.patch("adit.mass_transfer.processors.DicomOperator")
+ mocker.patch.object(processor, "_export_series", side_effect=_fake_export_success)
+
+ result = processor.process()
+
+ assert result["status"] == MassTransferTask.Status.SUCCESS
+ # Old partition folder was deleted (process recreates it for the new export)
+ assert not (partition_dir / "some_file.dcm").exists()
+ # Old volumes were deleted, only the new one remains
+ vols = MassTransferVolume.objects.filter(job=job, partition_key="20240101")
+ assert vols.count() == 1
+ vol = vols.first()
+ assert vol is not None
+ assert vol.series_instance_uid == "1.2.3.new"
+
+
+# ---------------------------------------------------------------------------
+# MassTransferJob.get_filters() tests
+# ---------------------------------------------------------------------------
+
+
+@pytest.mark.django_db
+def test_get_filters_from_json():
+ """get_filters() returns FilterSpec objects from valid filters_json."""
+ user = UserFactory.create()
+ job = MassTransferJob.objects.create(
+ owner=user,
+ start_date=date(2024, 1, 1),
+ end_date=date(2024, 1, 1),
+ partition_granularity=MassTransferJob.PartitionGranularity.DAILY,
+ )
+
+ # Valid JSON list of filter dicts
+ job.filters_json = [
+ {"modality": "CT", "min_age": 18},
+ {"modality": "MR", "series_description": "T1*"},
+ ]
+ job.save(update_fields=["filters_json"])
+
+ filters = job.get_filters()
+ assert len(filters) == 2
+ assert filters[0].modality == "CT"
+ assert filters[0].min_age == 18
+ assert filters[1].modality == "MR"
+ assert filters[1].series_description == "T1*"
+
+ # Empty list
+ job.filters_json = []
+ job.save(update_fields=["filters_json"])
+ assert job.get_filters() == []
+
+
+@pytest.mark.django_db
+def test_get_filters_empty():
+ """get_filters() returns [] when filters_json is None."""
+ user = UserFactory.create()
+ job = MassTransferJob.objects.create(
+ owner=user,
+ start_date=date(2024, 1, 1),
+ end_date=date(2024, 1, 1),
+ partition_granularity=MassTransferJob.PartitionGranularity.DAILY,
+ )
+
+ job.filters_json = None
+ job.save(update_fields=["filters_json"])
+ assert job.get_filters() == []
+
+
+# ---------------------------------------------------------------------------
+# _destination_base_dir tests
+# ---------------------------------------------------------------------------
+
+
+@pytest.mark.django_db
+def test_destination_base_dir_creates_job_folder(tmp_path: Path):
+ """Output dir should include adit_{app}_{pk}_{date}_{owner} parent folder."""
+ user = UserFactory.create(username="rghosh")
+ destination = DicomFolderFactory.create(path=str(tmp_path))
+ job = MassTransferJob.objects.create(
+ owner=user,
+ start_date=date(2025, 3, 16),
+ end_date=date(2025, 3, 16),
+ partition_granularity=MassTransferJob.PartitionGranularity.DAILY,
+ )
+
+ result = _destination_base_dir(destination, job)
+
+ expected_name = f"adit_mass_transfer_{job.pk}_{job.created.strftime('%Y%m%d')}_rghosh"
+ assert result == tmp_path / expected_name
+ assert result.is_dir()
+
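+# A sketch of _destination_base_dir consistent with the tests in this section
+# (the attribute names and the sanitization regex are assumptions; only "no
+# path separators" is asserted below):
+#
+#     def _destination_base_dir(node, job) -> Path:
+#         assert node.node_type == DicomNode.NodeType.FOLDER
+#         owner = re.sub(r"[^A-Za-z0-9_-]", "_", job.owner.username)
+#         name = f"adit_mass_transfer_{job.pk}_{job.created:%Y%m%d}_{owner}"
+#         base = Path(node.path) / name
+#         base.mkdir(parents=True, exist_ok=True)  # idempotent by design
+#         return base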
+
+@pytest.mark.django_db
+def test_destination_base_dir_is_idempotent(tmp_path: Path):
+ """Calling _destination_base_dir twice should not fail or create duplicates."""
+ user = UserFactory.create(username="testuser")
+ destination = DicomFolderFactory.create(path=str(tmp_path))
+ job = MassTransferJob.objects.create(
+ owner=user,
+ start_date=date(2025, 1, 1),
+ end_date=date(2025, 1, 1),
+ partition_granularity=MassTransferJob.PartitionGranularity.DAILY,
+ )
+
+ result1 = _destination_base_dir(destination, job)
+ result2 = _destination_base_dir(destination, job)
+
+ assert result1 == result2
+ assert result1.is_dir()
+
+
+def test_destination_base_dir_asserts_on_server_node(mocker: MockerFixture):
+ """Should raise AssertionError when node is not a FOLDER."""
+ node = mocker.MagicMock()
+ node.node_type = DicomNode.NodeType.SERVER
+ job = mocker.MagicMock()
+
+ with pytest.raises(AssertionError):
+ _destination_base_dir(node, job)
+
+
+@pytest.mark.django_db
+def test_destination_base_dir_sanitizes_username(tmp_path: Path):
+ """Usernames with special chars should be sanitized in the folder name."""
+ user = UserFactory.create(username="user/with:special")
+ destination = DicomFolderFactory.create(path=str(tmp_path))
+ job = MassTransferJob.objects.create(
+ owner=user,
+ start_date=date(2025, 1, 1),
+ end_date=date(2025, 1, 1),
+ partition_granularity=MassTransferJob.PartitionGranularity.DAILY,
+ )
+
+ result = _destination_base_dir(destination, job)
+
+ # Should not contain path separators
+ folder_name = result.name
+ assert "/" not in folder_name
+ assert "\\" not in folder_name
+ assert result.is_dir()
+
+
+@pytest.mark.django_db
+def test_process_output_path_includes_job_folder(mocker: MockerFixture, tmp_path: Path):
+ """End-to-end: process() output path should include job-identifying folder."""
+ MassTransferSettings.objects.create()
+
+ user = UserFactory.create(username="researcher")
+ source = DicomServerFactory.create()
+ destination = DicomFolderFactory.create(path=str(tmp_path / "output"))
+ job = MassTransferJob.objects.create(
+ owner=user,
+ start_date=date(2024, 1, 1),
+ end_date=date(2024, 1, 1),
+ partition_granularity=MassTransferJob.PartitionGranularity.DAILY,
+ pseudonymize=False,
+ pseudonym_salt="",
+ )
+ job.filters_json = [{"modality": "CT"}]
+ job.save(update_fields=["filters_json"])
+
+ now = timezone.now()
+ task = MassTransferTask.objects.create(
+ job=job,
+ source=source,
+ destination=destination,
+ patient_id="",
+ study_uid="",
+ partition_start=now,
+ partition_end=now + timedelta(hours=23, minutes=59, seconds=59),
+ partition_key="20240101",
+ )
+
+ series = [_make_discovered(patient_id="PAT1", series_uid="1.2.3.4.5")]
+
+ processor = MassTransferTaskProcessor(task)
+ mocker.patch.object(processor, "_discover_series", return_value=series)
+ mocker.patch("adit.mass_transfer.processors.DicomOperator")
+
+ export_paths: list[Path] = []
+
+ def fake_export(op, s, path, subject_id, pseudonymizer):
+ export_paths.append(path)
+ return (1, "", "")
+
+ mocker.patch.object(processor, "_export_series", side_effect=fake_export)
+
+ result = processor.process()
+
+ assert result["status"] == MassTransferTask.Status.SUCCESS
+ assert len(export_paths) == 1
+
+ # The path should contain the job-identifying folder
+ expected_prefix = f"adit_mass_transfer_{job.pk}_{job.created.strftime('%Y%m%d')}_researcher"
+ assert expected_prefix in str(export_paths[0])
diff --git a/adit/mass_transfer/tests/test_queue_pending_tasks.py b/adit/mass_transfer/tests/test_queue_pending_tasks.py
new file mode 100644
index 000000000..2f00665b4
--- /dev/null
+++ b/adit/mass_transfer/tests/test_queue_pending_tasks.py
@@ -0,0 +1,155 @@
+import pytest
+from procrastinate.contrib.django.models import ProcrastinateJob
+
+from adit.core.models import DicomJob, DicomTask
+
+from ..factories import MassTransferJobFactory, MassTransferTaskFactory
+from ..tasks import queue_mass_transfer_tasks
+
+
+@pytest.mark.django_db(transaction=True)
+def test_queue_pending_tasks_defers_background_job():
+ """queue_pending_tasks() should defer a single job on the default queue
+ without queuing individual tasks inline."""
+ job = MassTransferJobFactory.create(status=DicomJob.Status.PENDING)
+ MassTransferTaskFactory.create(status=DicomTask.Status.PENDING, job=job)
+ MassTransferTaskFactory.create(status=DicomTask.Status.PENDING, job=job)
+
+ job.queue_pending_tasks()
+
+ # A single queueing job should be deferred on the default queue
+ queueing_jobs = ProcrastinateJob.objects.filter(
+ task_name="adit.mass_transfer.tasks.queue_mass_transfer_tasks"
+ )
+ assert queueing_jobs.count() == 1
+ queueing_job = queueing_jobs.first()
+ assert queueing_job is not None
+ assert queueing_job.queue_name == "default"
+
+ # Individual tasks should NOT have been queued yet
+ for task in job.tasks.all():
+ assert task.queued_job is None
+
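+# The two-stage mechanism these tests describe, sketched under the assumption
+# that procrastinate's Django task API is used (this is not the actual code):
+#
+#     @app.task(queue="default", name="adit.mass_transfer.tasks.queue_mass_transfer_tasks")
+#     def queue_mass_transfer_tasks(job_id: int) -> None:
+#         job = MassTransferJob.objects.filter(pk=job_id).first()
+#         if job is None or job.status != DicomJob.Status.PENDING:
+#             return  # job deleted or canceled before the background job ran
+#         pending = job.tasks.filter(status=DicomTask.Status.PENDING, queued_job=None)
+#         for task in pending:
+#             # defer the per-task worker job onto the "mass_transfer" queue and
+#             # store the returned procrastinate job id in task.queued_job
+#             ...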
+
+@pytest.mark.django_db(transaction=True)
+def test_background_job_queues_all_pending_tasks():
+ """After queue_mass_transfer_tasks runs, all pending tasks should have
+ queued_job set and be placed on the mass_transfer queue."""
+ job = MassTransferJobFactory.create(status=DicomJob.Status.PENDING)
+ task1 = MassTransferTaskFactory.create(status=DicomTask.Status.PENDING, job=job)
+ task2 = MassTransferTaskFactory.create(status=DicomTask.Status.PENDING, job=job)
+
+ queue_mass_transfer_tasks(job_id=job.pk)
+
+ task1.refresh_from_db()
+ task2.refresh_from_db()
+ assert task1.queued_job is not None
+ assert task2.queued_job is not None
+
+ for task in [task1, task2]:
+ procrastinate_job = ProcrastinateJob.objects.get(pk=task.queued_job_id)
+ assert procrastinate_job.queue_name == "mass_transfer"
+
+
+@pytest.mark.django_db(transaction=True)
+def test_background_job_skips_canceled_tasks():
+ """Canceled tasks should not be queued."""
+ job = MassTransferJobFactory.create(status=DicomJob.Status.PENDING)
+ pending_task = MassTransferTaskFactory.create(status=DicomTask.Status.PENDING, job=job)
+ canceled_task = MassTransferTaskFactory.create(status=DicomTask.Status.CANCELED, job=job)
+
+ queue_mass_transfer_tasks(job_id=job.pk)
+
+ pending_task.refresh_from_db()
+ canceled_task.refresh_from_db()
+ assert pending_task.queued_job is not None
+ assert canceled_task.queued_job is None
+
+
+@pytest.mark.django_db(transaction=True)
+def test_background_job_is_idempotent():
+ """Calling queue_mass_transfer_tasks twice should not double-queue tasks."""
+ job = MassTransferJobFactory.create(status=DicomJob.Status.PENDING)
+ task1 = MassTransferTaskFactory.create(status=DicomTask.Status.PENDING, job=job)
+ task2 = MassTransferTaskFactory.create(status=DicomTask.Status.PENDING, job=job)
+
+ queue_mass_transfer_tasks(job_id=job.pk)
+
+ task1.refresh_from_db()
+ task2.refresh_from_db()
+ first_queued_job_1 = task1.queued_job_id
+ first_queued_job_2 = task2.queued_job_id
+ assert first_queued_job_1 is not None
+ assert first_queued_job_2 is not None
+
+ # Call again — tasks already have queued_job set, so they should be skipped
+ queue_mass_transfer_tasks(job_id=job.pk)
+
+ task1.refresh_from_db()
+ task2.refresh_from_db()
+ assert task1.queued_job_id == first_queued_job_1
+ assert task2.queued_job_id == first_queued_job_2
+
+
+@pytest.mark.django_db(transaction=True)
+def test_background_job_skips_deleted_job():
+ """If the job is deleted before the background task runs, it should
+ gracefully skip."""
+ job = MassTransferJobFactory.create(status=DicomJob.Status.PENDING)
+ MassTransferTaskFactory.create(status=DicomTask.Status.PENDING, job=job)
+
+ job_id = job.pk
+ job.delete()
+
+ # Should not raise
+ queue_mass_transfer_tasks(job_id=job_id)
+
+
+@pytest.mark.django_db(transaction=True)
+def test_background_job_skips_non_pending_job():
+ """If the job status changes before the background task runs (e.g. cancel),
+ tasks should not be queued."""
+ job = MassTransferJobFactory.create(status=DicomJob.Status.PENDING)
+ task = MassTransferTaskFactory.create(status=DicomTask.Status.PENDING, job=job)
+
+ # Simulate cancel happening before the background job runs
+ job.status = DicomJob.Status.CANCELED
+ job.save()
+
+ queue_mass_transfer_tasks(job_id=job.pk)
+
+ task.refresh_from_db()
+ assert task.queued_job is None
+
+
+@pytest.mark.django_db(transaction=True)
+def test_queue_mass_transfer_tasks_sets_queued_job():
+ """Calling queue_mass_transfer_tasks directly should set queued_job
+ on each pending task and place them on the mass_transfer queue."""
+ job = MassTransferJobFactory.create(status=DicomJob.Status.PENDING)
+ task1 = MassTransferTaskFactory.create(status=DicomTask.Status.PENDING, job=job)
+ task2 = MassTransferTaskFactory.create(status=DicomTask.Status.PENDING, job=job)
+
+ queue_mass_transfer_tasks(job_id=job.pk)
+
+ task1.refresh_from_db()
+ task2.refresh_from_db()
+ assert task1.queued_job is not None
+ assert task2.queued_job is not None
+
+ for task in [task1, task2]:
+ procrastinate_job = ProcrastinateJob.objects.get(pk=task.queued_job_id)
+ assert procrastinate_job.queue_name == "mass_transfer"
+
+
+@pytest.mark.django_db(transaction=True)
+def test_queue_mass_transfer_tasks_uses_urgent_priority():
+ """Urgent jobs should queue tasks with urgent priority."""
+ job = MassTransferJobFactory.create(status=DicomJob.Status.PENDING, urgent=True)
+ task = MassTransferTaskFactory.create(status=DicomTask.Status.PENDING, job=job)
+
+ queue_mass_transfer_tasks(job_id=job.pk)
+
+ task.refresh_from_db()
+ procrastinate_job = ProcrastinateJob.objects.get(pk=task.queued_job_id)
+ assert procrastinate_job.priority == job.urgent_priority
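+
+# Note: job.urgent_priority is expected to map to the MASS_TRANSFER_URGENT_PRIORITY
+# setting (5, see adit/settings/base.py in this diff); non-urgent jobs would fall
+# back to MASS_TRANSFER_DEFAULT_PRIORITY (1). This mapping is inferred from the
+# settings change below, not asserted here.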
diff --git a/adit/mass_transfer/urls.py b/adit/mass_transfer/urls.py
new file mode 100644
index 000000000..e84594b4d
--- /dev/null
+++ b/adit/mass_transfer/urls.py
@@ -0,0 +1,81 @@
+from django.urls import path
+
+from .views import (
+ MassTransferJobCancelView,
+ MassTransferJobCreateView,
+ MassTransferJobCsvExportView,
+ MassTransferJobDeleteView,
+ MassTransferJobDetailView,
+ MassTransferJobListView,
+ MassTransferJobRestartView,
+ MassTransferJobResumeView,
+ MassTransferJobRetryView,
+ MassTransferJobVerifyView,
+ MassTransferTaskDeleteView,
+ MassTransferTaskDetailView,
+ MassTransferTaskKillView,
+ MassTransferTaskResetView,
+ MassTransferUpdatePreferencesView,
+)
+
+urlpatterns = [
+ path(
+ "preferences/",
+ MassTransferUpdatePreferencesView.as_view(),
+ name="mass_transfer_update_preferences",
+ ),
+ path("jobs/", MassTransferJobListView.as_view(), name="mass_transfer_job_list"),
+ path("jobs/new/", MassTransferJobCreateView.as_view(), name="mass_transfer_job_create"),
+ path("jobs//", MassTransferJobDetailView.as_view(), name="mass_transfer_job_detail"),
+ path(
+ "jobs//csv/",
+ MassTransferJobCsvExportView.as_view(),
+ name="mass_transfer_job_csv_export",
+ ),
+ path(
+ "jobs//delete/",
+ MassTransferJobDeleteView.as_view(),
+ name="mass_transfer_job_delete",
+ ),
+ path(
+ "jobs//verify/",
+ MassTransferJobVerifyView.as_view(),
+ name="mass_transfer_job_verify",
+ ),
+ path(
+ "jobs//cancel/",
+ MassTransferJobCancelView.as_view(),
+ name="mass_transfer_job_cancel",
+ ),
+ path(
+ "jobs//resume/",
+ MassTransferJobResumeView.as_view(),
+ name="mass_transfer_job_resume",
+ ),
+ path(
+ "jobs//retry/",
+ MassTransferJobRetryView.as_view(),
+ name="mass_transfer_job_retry",
+ ),
+ path(
+ "jobs//restart/",
+ MassTransferJobRestartView.as_view(),
+ name="mass_transfer_job_restart",
+ ),
+ path("tasks//", MassTransferTaskDetailView.as_view(), name="mass_transfer_task_detail"),
+ path(
+ "tasks//delete/",
+ MassTransferTaskDeleteView.as_view(),
+ name="mass_transfer_task_delete",
+ ),
+ path(
+ "tasks//reset/",
+ MassTransferTaskResetView.as_view(),
+ name="mass_transfer_task_reset",
+ ),
+ path(
+ "tasks//kill/",
+ MassTransferTaskKillView.as_view(),
+ name="mass_transfer_task_kill",
+ ),
+]
diff --git a/adit/mass_transfer/utils/__init__.py b/adit/mass_transfer/utils/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/adit/mass_transfer/utils/partitions.py b/adit/mass_transfer/utils/partitions.py
new file mode 100644
index 000000000..9ae415869
--- /dev/null
+++ b/adit/mass_transfer/utils/partitions.py
@@ -0,0 +1,57 @@
+from __future__ import annotations
+
+from dataclasses import dataclass
+from datetime import date, datetime, time, timedelta
+
+from django.utils import timezone
+
+
+@dataclass(frozen=True)
+class PartitionWindow:
+ start: datetime
+ end: datetime
+ key: str
+
+
+def build_partitions(
+ start_date: date,
+ end_date: date,
+ granularity: str,
+) -> list[PartitionWindow]:
+ """Split a date range into non-overlapping partition windows.
+
+ Each partition covers one day (daily) or up to seven days (weekly).
+ Returns a list of PartitionWindow objects ordered chronologically.
+ The last partition may be shorter than the step if *end_date* does not
+ align with a full window.
+ """
+ if end_date < start_date:
+ raise ValueError("End date must be on or after the start date.")
+
+ if granularity not in {"daily", "weekly"}:
+ raise ValueError(f"Invalid granularity: {granularity}")
+
+ if granularity == "daily":
+ step = timedelta(days=1)
+ else:
+ step = timedelta(days=7)
+
+ tz = timezone.get_current_timezone()
+ windows: list[PartitionWindow] = []
+
+ current = start_date
+ while current <= end_date:
+ window_end_date = min(current + step - timedelta(days=1), end_date)
+
+ start_dt = timezone.make_aware(datetime.combine(current, time(0, 0, 0)), tz)
+ end_dt = timezone.make_aware(datetime.combine(window_end_date, time(23, 59, 59)), tz)
+
+ if current == window_end_date:
+ key = current.strftime("%Y%m%d")
+ else:
+ key = f"{current:%Y%m%d}-{window_end_date:%Y%m%d}"
+
+ windows.append(PartitionWindow(start=start_dt, end=end_dt, key=key))
+ current = window_end_date + timedelta(days=1)
+
+ return windows
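+
+# Example: build_partitions(date(2024, 1, 1), date(2024, 1, 10), "weekly")
+# yields two windows, "20240101-20240107" and the shorter trailing
+# "20240108-20240110", each spanning 00:00:00 to 23:59:59 local time.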
diff --git a/adit/mass_transfer/utils/testing_helpers.py b/adit/mass_transfer/utils/testing_helpers.py
new file mode 100644
index 000000000..a8edd4b45
--- /dev/null
+++ b/adit/mass_transfer/utils/testing_helpers.py
@@ -0,0 +1,9 @@
+from adit_radis_shared.accounts.factories import GroupFactory
+from adit_radis_shared.common.utils.testing_helpers import add_permission
+
+
+def create_mass_transfer_group():
+ group = GroupFactory.create(name="Radiologists")
+ add_permission(group, "mass_transfer", "add_masstransferjob")
+ add_permission(group, "mass_transfer", "view_masstransferjob")
+ return group
diff --git a/adit/mass_transfer/views.py b/adit/mass_transfer/views.py
new file mode 100644
index 000000000..346306a20
--- /dev/null
+++ b/adit/mass_transfer/views.py
@@ -0,0 +1,203 @@
+import csv
+from typing import Any, cast
+
+from adit_radis_shared.common.mixins import PageSizeSelectMixin, RelatedFilterMixin
+from adit_radis_shared.common.views import BaseUpdatePreferencesView
+from django.conf import settings
+from django.contrib.auth.mixins import LoginRequiredMixin
+from django.db.models import QuerySet
+from django.http import HttpResponse
+from django.shortcuts import get_object_or_404
+from django.urls import reverse_lazy
+from django.views import View
+from django_tables2 import SingleTableMixin
+
+from adit.core.views import (
+ DicomJobCancelView,
+ DicomJobCreateView,
+ DicomJobDeleteView,
+ DicomJobDetailView,
+ DicomJobRestartView,
+ DicomJobResumeView,
+ DicomJobRetryView,
+ DicomJobVerifyView,
+ DicomTaskDeleteView,
+ DicomTaskDetailView,
+ DicomTaskKillView,
+ DicomTaskResetView,
+ TransferJobListView,
+)
+
+from .filters import MassTransferJobFilter, MassTransferTaskFilter, MassTransferVolumeFilter
+from .forms import MassTransferJobForm
+from .mixins import MassTransferLockedMixin
+from .models import (
+ MassTransferJob,
+ MassTransferTask,
+ MassTransferVolume,
+)
+from .tables import MassTransferJobTable, MassTransferTaskTable, MassTransferVolumeTable
+
+MASS_TRANSFER_SOURCE = "mass_transfer_source"
+MASS_TRANSFER_DESTINATION = "mass_transfer_destination"
+MASS_TRANSFER_GRANULARITY = "mass_transfer_granularity"
+MASS_TRANSFER_SEND_FINISHED_MAIL = "mass_transfer_send_finished_mail"
+
+
+class MassTransferUpdatePreferencesView(MassTransferLockedMixin, BaseUpdatePreferencesView):
+ allowed_keys = [
+ MASS_TRANSFER_SOURCE,
+ MASS_TRANSFER_DESTINATION,
+ MASS_TRANSFER_GRANULARITY,
+ MASS_TRANSFER_SEND_FINISHED_MAIL,
+ ]
+
+
+class MassTransferJobListView(MassTransferLockedMixin, TransferJobListView):
+ model = MassTransferJob
+ table_class = MassTransferJobTable
+ filterset_class = MassTransferJobFilter
+ template_name = "mass_transfer/mass_transfer_job_list.html"
+
+
+class MassTransferJobCreateView(MassTransferLockedMixin, DicomJobCreateView):
+ model = MassTransferJob
+ form_class = MassTransferJobForm
+ template_name = "mass_transfer/mass_transfer_job_form.html"
+ permission_required = "mass_transfer.add_masstransferjob"
+ object: MassTransferJob
+
+ def get_initial(self) -> dict[str, Any]:
+ initial = super().get_initial()
+ preferences: dict[str, Any] = self.request.user.preferences
+
+ source = preferences.get(MASS_TRANSFER_SOURCE)
+ if source is not None:
+ initial["source"] = source
+
+ destination = preferences.get(MASS_TRANSFER_DESTINATION)
+ if destination is not None:
+ initial["destination"] = destination
+
+ granularity = preferences.get(MASS_TRANSFER_GRANULARITY)
+ if granularity is not None:
+ initial["partition_granularity"] = granularity
+
+ send_finished_mail = preferences.get(MASS_TRANSFER_SEND_FINISHED_MAIL)
+ if send_finished_mail is not None:
+ initial["send_finished_mail"] = send_finished_mail
+
+ return initial
+
+ def form_valid(self, form):
+ return super().form_valid(form, settings.START_MASS_TRANSFER_UNVERIFIED)
+
+
+class MassTransferJobDetailView(MassTransferLockedMixin, DicomJobDetailView):
+ table_class = MassTransferTaskTable
+ filterset_class = MassTransferTaskFilter
+ model = MassTransferJob
+ context_object_name = "job"
+ template_name = "mass_transfer/mass_transfer_job_detail.html"
+
+
+class MassTransferJobCsvExportView(LoginRequiredMixin, MassTransferLockedMixin, View):
+ """Streams a full CSV export of all volumes for a mass transfer job."""
+
+ COLUMNS = [
+ "partition_key",
+ "pseudonym",
+ "patient_id",
+ "accession_number",
+ "study_instance_uid",
+ "study_instance_uid_pseudonymized",
+ "series_instance_uid",
+ "series_instance_uid_pseudonymized",
+ "modality",
+ "study_description",
+ "series_description",
+ "series_number",
+ "study_datetime",
+ "institution_name",
+ "number_of_images",
+ ]
+
+ def get(self, request, pk):
+ if request.user.is_staff:
+ qs = MassTransferJob.objects.all()
+ else:
+ qs = MassTransferJob.objects.filter(owner=request.user)
+
+ job = get_object_or_404(qs, pk=pk)
+
+ volumes = MassTransferVolume.objects.filter(job=job).values_list(*self.COLUMNS)
+
+ response = HttpResponse(content_type="text/csv")
+ response["Content-Disposition"] = f'attachment; filename="mass_transfer_job_{job.pk}.csv"'
+
+ writer = csv.writer(response)
+ if job.pseudonym_salt:
+ response.write(f"# Pseudonym salt: {job.pseudonym_salt}\n")
+ writer.writerow(self.COLUMNS)
+ for row in volumes.iterator():
+ writer.writerow(row)
+
+ return response
+
+
+class MassTransferJobDeleteView(MassTransferLockedMixin, DicomJobDeleteView):
+ model = MassTransferJob
+ success_url = cast(str, reverse_lazy("mass_transfer_job_list"))
+
+
+class MassTransferJobVerifyView(MassTransferLockedMixin, DicomJobVerifyView):
+ model = MassTransferJob
+
+
+class MassTransferJobCancelView(MassTransferLockedMixin, DicomJobCancelView):
+ model = MassTransferJob
+
+
+class MassTransferJobResumeView(MassTransferLockedMixin, DicomJobResumeView):
+ model = MassTransferJob
+
+
+class MassTransferJobRetryView(MassTransferLockedMixin, DicomJobRetryView):
+ model = MassTransferJob
+
+
+class MassTransferJobRestartView(MassTransferLockedMixin, DicomJobRestartView):
+ model = MassTransferJob
+
+
+class MassTransferTaskDetailView(
+ MassTransferLockedMixin,
+ SingleTableMixin,
+ RelatedFilterMixin,
+ PageSizeSelectMixin,
+ DicomTaskDetailView,
+):
+ model = MassTransferTask
+ job_url_name = "mass_transfer_job_detail"
+ template_name = "mass_transfer/mass_transfer_task_detail.html"
+ table_class = MassTransferVolumeTable
+ filterset_class = MassTransferVolumeFilter
+ table_pagination = {"per_page": 25}
+
+ def get_filter_queryset(self) -> QuerySet[MassTransferVolume]:
+ task = cast(MassTransferTask, self.get_object())
+ return task.volumes.all()
+
+
+class MassTransferTaskDeleteView(MassTransferLockedMixin, DicomTaskDeleteView):
+ model = MassTransferTask
+
+
+class MassTransferTaskResetView(MassTransferLockedMixin, DicomTaskResetView):
+ model = MassTransferTask
+
+
+class MassTransferTaskKillView(MassTransferLockedMixin, DicomTaskKillView):
+ model = MassTransferTask
+
+
diff --git a/adit/settings/base.py b/adit/settings/base.py
index e08ade256..4dff02e8a 100644
--- a/adit/settings/base.py
+++ b/adit/settings/base.py
@@ -49,6 +49,8 @@
CSRF_TRUSTED_ORIGINS = env.list("DJANGO_CSRF_TRUSTED_ORIGINS")
+SESSION_COOKIE_NAME = env.str("DJANGO_SESSION_COOKIE_NAME", default="sessionid")
+
INSTALLED_APPS = [
"daphne",
"whitenoise.runserver_nostatic",
@@ -70,6 +72,7 @@
"loginas",
"django_cotton.apps.SimpleAppConfig",
"block_fragments.apps.SimpleAppConfig",
+ "codemirror",
"crispy_forms",
"crispy_bootstrap5",
"django_htmx",
@@ -82,6 +85,7 @@
"adit.selective_transfer.apps.SelectiveTransferConfig",
"adit.batch_query.apps.BatchQueryConfig",
"adit.batch_transfer.apps.BatchTransferConfig",
+ "adit.mass_transfer.apps.MassTransferConfig",
"adit.upload.apps.UploadConfig",
"adit.dicom_explorer.apps.DicomExplorerConfig",
"adit.dicom_web.apps.DicomWebConfig",
@@ -305,6 +309,31 @@
# django-templates2
DJANGO_TABLES2_TEMPLATE = "common/_django_tables2.html"
+# django-codemirror — assets vendored in mass_transfer/static/mass_transfer/vendor/codemirror/
+_CM = "mass_transfer/vendor/codemirror"
+CODEMIRROR_CSS = [
+ f"{_CM}/codemirror.min.css",
+ f"{_CM}/addon/lint/lint.min.css",
+]
+CODEMIRROR_JS = [
+ f"{_CM}/jsonlint.min.js",
+ f"{_CM}/codemirror.min.js",
+ f"{_CM}/mode/javascript/javascript.min.js",
+ f"{_CM}/addon/edit/matchbrackets.min.js",
+ f"{_CM}/addon/edit/closebrackets.min.js",
+ f"{_CM}/addon/lint/lint.min.js",
+ f"{_CM}/addon/lint/json-lint.min.js",
+]
+CODEMIRROR_CONFIG = {
+ "lineNumbers": True,
+ "matchBrackets": True,
+ "autoCloseBrackets": True,
+ "tabSize": 2,
+ "indentWithTabs": False,
+ "gutters": ["CodeMirror-lint-markers"],
+ "lint": True,
+}
+
# The salt that is used for hashing new tokens in the token authentication app.
# Cave, changing the salt after some tokens were already generated makes them all invalid!
TOKEN_AUTHENTICATION_SALT = env.str("TOKEN_AUTHENTICATION_SALT")
@@ -357,6 +386,7 @@
START_SELECTIVE_TRANSFER_UNVERIFIED = True
START_BATCH_QUERY_UNVERIFIED = True
START_BATCH_TRANSFER_UNVERIFIED = True
+START_MASS_TRANSFER_UNVERIFIED = True
# Priorities of dicom tasks
# Selective transfers have the highest priority as those are
@@ -368,6 +398,8 @@
BATCH_TRANSFER_URGENT_PRIORITY = 6
BATCH_QUERY_DEFAULT_PRIORITY = 3
BATCH_QUERY_URGENT_PRIORITY = 7
+MASS_TRANSFER_DEFAULT_PRIORITY = 1
+MASS_TRANSFER_URGENT_PRIORITY = 5
# The priority for stalled jobs that are retried.
STALLED_JOBS_RETRY_PRIORITY = 10
@@ -383,6 +415,7 @@
# The maximum number of results (patients or studies) in dicom_explorer
DICOM_EXPLORER_RESULT_LIMIT = 101
+
# The timeout in dicom_explorer a DICOM server must respond
DICOM_EXPLORER_RESPONSE_TIMEOUT = 3 # seconds
diff --git a/adit/urls.py b/adit/urls.py
index 5f833509c..5d1b67ab7 100644
--- a/adit/urls.py
+++ b/adit/urls.py
@@ -27,6 +27,7 @@
path("selective-transfer/", include("adit.selective_transfer.urls")),
path("batch-query/", include("adit.batch_query.urls")),
path("batch-transfer/", include("adit.batch_transfer.urls")),
+ path("mass-transfer/", include("adit.mass_transfer.urls")),
path("upload/", include("adit.upload.urls")),
path("dicom-explorer/", include("adit.dicom_explorer.urls")),
path("token-authentication/", include("adit_radis_shared.token_authentication.urls")),
diff --git a/cli.py b/cli.py
index 26323527b..1225f15e2 100755
--- a/cli.py
+++ b/cli.py
@@ -37,6 +37,40 @@
app.command()(commands.try_github_actions)
+@app.command()
+def stack_deploy_staging():
+ """Build images and deploy staging stack with Docker Swarm"""
+
+ helper = cli_helper.CommandHelper()
+ helper.prepare_environment()
+
+ env = helper.load_config_from_env_file()
+ env["PROJECT_VERSION"] = helper.get_local_project_version()
+
+ base_file = helper.get_compose_base_file()
+ staging_file = helper.root_path / "docker-compose.staging.yml"
+ stack_name = f"{helper.project_id}_staging"
+
+ # Build images first (docker stack deploy does not support build)
+ build_cmd = f"docker compose -f {base_file} -f {staging_file} build"
+ helper.execute_cmd(build_cmd, env={**env, "COMPOSE_BAKE": "true"})
+
+ deploy_cmd = "docker stack deploy --detach"
+ deploy_cmd += f" -c {base_file}"
+ deploy_cmd += f" -c {staging_file}"
+ deploy_cmd += f" {stack_name}"
+ helper.execute_cmd(deploy_cmd, env=env)
+
+
+@app.command()
+def stack_rm_staging():
+ """Remove staging stack from Docker Swarm"""
+
+ helper = cli_helper.CommandHelper()
+ stack_name = f"{helper.project_id}_staging"
+ helper.execute_cmd(f"docker stack rm {stack_name}")
+
+
@app.command()
def populate_orthancs(
reset: Annotated[bool, typer.Option(help="Clear Orthancs before populate")] = False,
diff --git a/docker-compose.base.yml b/docker-compose.base.yml
index 2a122f109..fc2b3f1c2 100644
--- a/docker-compose.base.yml
+++ b/docker-compose.base.yml
@@ -64,6 +64,10 @@ services:
<<: *default-app
hostname: dicom_worker.local
+ mass_transfer_worker:
+ <<: *default-app
+ hostname: mass_transfer_worker.local
+
receiver:
<<: *default-app
hostname: receiver.local
diff --git a/docker-compose.dev.yml b/docker-compose.dev.yml
index b852debcb..2daa6658f 100644
--- a/docker-compose.dev.yml
+++ b/docker-compose.dev.yml
@@ -66,6 +66,15 @@ services:
./manage.py bg_worker -l debug -q dicom --autoreload
"
+ mass_transfer_worker:
+ <<: *default-app
+ image: adit_dev-mass_transfer_worker:latest
+ command: >
+ bash -c "
+ wait-for-it -s postgres.local:5432 -t 60 &&
+ ./manage.py bg_worker -l debug -q mass_transfer --autoreload
+ "
+
receiver:
<<: *default-app
image: adit_dev-receiver:latest
diff --git a/docker-compose.override.yml.example b/docker-compose.override.yml.example
index a129db932..7336fd80b 100644
--- a/docker-compose.override.yml.example
+++ b/docker-compose.override.yml.example
@@ -19,5 +19,7 @@ services:
<<: *observability
dicom_worker:
<<: *observability
+ mass_transfer_worker:
+ <<: *observability
receiver:
<<: *observability
diff --git a/docker-compose.prod.yml b/docker-compose.prod.yml
index 0c89945d0..e052d7d7f 100644
--- a/docker-compose.prod.yml
+++ b/docker-compose.prod.yml
@@ -78,6 +78,17 @@ services:
<<: *deploy
replicas: ${DICOM_WORKER_REPLICAS:-3}
+ mass_transfer_worker:
+ <<: *default-app
+ command: >
+ bash -c "
+ wait-for-it -s postgres.local:5432 -t 60 &&
+ ./manage.py bg_worker -q mass_transfer
+ "
+ deploy:
+ <<: *deploy
+ replicas: ${MASS_TRANSFER_WORKER_REPLICAS:-1}
+
receiver:
<<: *default-app
ports:
diff --git a/docker-compose.staging.yml b/docker-compose.staging.yml
new file mode 100644
index 000000000..d9780034e
--- /dev/null
+++ b/docker-compose.staging.yml
@@ -0,0 +1,130 @@
+# Staging environment - Dev mode on Docker Swarm for testing worker scaling
+#
+# - Uses development settings (DEBUG=True, debug toolbar, console email)
+# - Runs on localhost:8001 (different port from dev:8000)
+# - Starts with 3 mass_transfer_workers by default (vs 1 in dev)
+# - Just use your existing .env file (staging ports already defined in example.env)
+#
+# Deploy: uv run cli stack-deploy-staging
+# Access: http://localhost:8001
+
+x-app: &default-app
+ build:
+ target: development
+ environment:
+ # Exact same as dev - development Django settings
+ DJANGO_SETTINGS_MODULE: adit.settings.development
+ DJANGO_INTERNAL_IPS: ${DJANGO_INTERNAL_IPS:?}
+ FORCE_DEBUG_TOOLBAR: ${FORCE_DEBUG_TOOLBAR:-true}
+ DJANGO_SESSION_COOKIE_NAME: sessionid_staging
+ REMOTE_DEBUGGING_ENABLED: ${REMOTE_DEBUGGING_ENABLED:-false}
+ REMOTE_DEBUGGING_PORT: ${REMOTE_DEBUGGING_PORT:-5678}
+
+x-deploy: &deploy
+ replicas: 1
+ restart_policy:
+ condition: on-failure
+ max_attempts: 3
+
+services:
+ init:
+ <<: *default-app
+ image: adit_staging-web:latest
+ deploy:
+ replicas: 0
+
+ web:
+ <<: *default-app
+ image: adit_staging-web:latest
+ ports:
+ - ${WEB_STAGING_PORT:-8001}:8000
+ - ${REMOTE_DEBUGGING_STAGING_PORT:-5679}:5678
+ # Same as dev - plain HTTP via runserver
+ command: >
+ bash -c "
+ wait-for-it -s postgres.local:5432 -t 60 &&
+ ./manage.py migrate &&
+ ./manage.py create_superuser &&
+ ./manage.py create_example_users &&
+ ./manage.py create_example_groups &&
+ ./manage.py populate_example_data &&
+ wait-for-it -s orthanc1.local:6501 -t 60 &&
+ ./manage.py populate_orthancs &&
+ ./manage.py retry_stalled_jobs &&
+ ./manage.py runserver 0.0.0.0:8000
+ "
+ healthcheck:
+ test: ["CMD", "curl", "-f", "http://localhost:8000/health/"]
+ deploy:
+ <<: *deploy
+ replicas: ${WEB_STAGING_REPLICAS:-1}
+
+ default_worker:
+ <<: *default-app
+ image: adit_staging-default_worker:latest
+ # Same as dev - debug logging, autoreload
+ command: >
+ bash -c "
+ wait-for-it -s postgres.local:5432 -t 60 &&
+ ./manage.py bg_worker -l debug -q default --autoreload
+ "
+ deploy:
+ <<: *deploy
+ replicas: 1
+
+ dicom_worker:
+ <<: *default-app
+ image: adit_staging-dicom_worker:latest
+ # Same as dev - debug logging, autoreload
+ command: >
+ bash -c "
+ wait-for-it -s postgres.local:5432 -t 60 &&
+ ./manage.py bg_worker -l debug -q dicom --autoreload
+ "
+ deploy:
+ <<: *deploy
+ replicas: ${DICOM_WORKER_STAGING_REPLICAS:-1}
+
+ mass_transfer_worker:
+ <<: *default-app
+ image: adit_staging-mass_transfer_worker:latest
+ # Same as dev - debug logging, autoreload
+ command: >
+ bash -c "
+ wait-for-it -s postgres.local:5432 -t 60 &&
+ ./manage.py bg_worker -l debug -q mass_transfer --autoreload
+ "
+ deploy:
+ <<: *deploy
+ replicas: ${MASS_TRANSFER_WORKER_STAGING_REPLICAS:-3}
+
+ receiver:
+ <<: *default-app
+ image: adit_staging-receiver:latest
+ ports:
+ - ${RECEIVER_STAGING_PORT:-11123}:11112
+ # Same as dev - autoreload
+ command: |
+ ./manage.py receiver --autoreload
+ deploy:
+ <<: *deploy
+
+ postgres:
+ environment:
+ POSTGRES_PASSWORD: postgres
+ ports:
+ - ${POSTGRES_STAGING_PORT:-5433}:5432
+ deploy:
+ <<: *deploy
+
+ orthanc1:
+ ports:
+ - ${ORTHANC1_STAGING_PORT:-7503}:7501
+ deploy:
+ <<: *deploy
+
+ orthanc2:
+ ports:
+ - ${ORTHANC2_STAGING_PORT:-7504}:7502
+ deploy:
+ <<: *deploy
diff --git a/example.env b/example.env
index cc6f0d7af..f9e749696 100644
--- a/example.env
+++ b/example.env
@@ -6,6 +6,14 @@ ENVIRONMENT=development
WEB_DEV_PORT=8000
POSTGRES_DEV_PORT=5432
+# Ports that will be mapped to the host during staging (Docker Swarm with dev settings).
+WEB_STAGING_PORT=8001
+POSTGRES_STAGING_PORT=5433
+RECEIVER_STAGING_PORT=11123
+ORTHANC1_STAGING_PORT=7503
+ORTHANC2_STAGING_PORT=7504
+REMOTE_DEBUGGING_STAGING_PORT=5679
+
# Ports that will be mapped to the host during production.
WEB_HTTP_PORT=80
WEB_HTTPS_PORT=443
@@ -89,9 +97,15 @@ RECEIVER_AE_TITLE="ADIT1DEV"
# This does not affect downloads using the ADIT client.
EXCLUDE_MODALITIES="PR,SR"
-# Replicas of the services that can be scaled (production only).
+# Replicas of the services that can be scaled (staging).
+WEB_STAGING_REPLICAS=1
+DICOM_WORKER_STAGING_REPLICAS=1
+MASS_TRANSFER_WORKER_STAGING_REPLICAS=3
+
+# Replicas of the services that can be scaled (production).
WEB_REPLICAS=5
DICOM_WORKER_REPLICAS=3
+MASS_TRANSFER_WORKER_REPLICAS=5
# The directory where download folders are mounted.
MOUNT_DIR="/mnt"
diff --git a/globals.d.ts b/globals.d.ts
index ac440bd43..b086e0b23 100644
--- a/globals.d.ts
+++ b/globals.d.ts
@@ -5,4 +5,5 @@ declare global {
var dcmjs: any;
var Anonymizer: any;
var public: any;
+ var updatePreferences: any;
}
diff --git a/pyproject.toml b/pyproject.toml
index 6c63ca032..936081eaa 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -20,6 +20,7 @@ dependencies = [
"dicomweb-client>=0.60.0",
"Django>=5.1.6",
"django-block-fragments>=0.1.1",
+ "django-codemirror>=1.0.1",
"django-cotton>=1.6.0",
"django-crispy-forms>=2.3",
"django-dbbackup>=4.2.1",
@@ -42,10 +43,11 @@ dependencies = [
"procrastinate[django]>=3.0.2",
"psycopg[binary]>=3.2.5",
"pyarrow>=19.0.1",
+ "pydantic>=2.12.5",
"pydicom>=2.4.4",
"pynetdicom>=2.1.1",
- "stream-zip>=0.0.83",
"stamina>=24.2.0",
+ "stream-zip>=0.0.83",
"Twisted[tls,http2]>=24.11.0",
"wait-for-it>=2.3.0",
"watchfiles>=1.0.4",
diff --git a/scripts/csv_to_mass_transfer_filters.py b/scripts/csv_to_mass_transfer_filters.py
new file mode 100644
index 000000000..3b395f2dc
--- /dev/null
+++ b/scripts/csv_to_mass_transfer_filters.py
@@ -0,0 +1,158 @@
+#!/usr/bin/env python3
+"""Convert a CSV file of mass transfer filters into JSON compatible with
+the MassTransferJob.filters_json form field.
+
+CSV columns (all optional, header names must match):
+ study_description, series_description, modality, institution_name
+
+Usage examples:
+ python scripts/csv_to_mass_transfer_filters.py filters.csv
+ python scripts/csv_to_mass_transfer_filters.py filters.csv --delimiter ";"
+ python scripts/csv_to_mass_transfer_filters.py filters.csv --min-age 18
+ python scripts/csv_to_mass_transfer_filters.py filters.csv --min-age 18 --max-age 90
+ python scripts/csv_to_mass_transfer_filters.py filters.csv --min-series-instances 5
+ python scripts/csv_to_mass_transfer_filters.py filters.csv -o output.json
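+
+Example: given a filters.csv with columns modality and series_description and
+rows "CT," and "MR,T1*", the script emits two filters:
+{"modality": "CT"} and {"modality": "MR", "series_description": "T1*"}.
+Empty cells are dropped; unknown columns are ignored with a warning on stderr.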
+"""
+
+from __future__ import annotations
+
+import argparse
+import csv
+import json
+import sys
+from pathlib import Path
+
+KNOWN_COLUMNS = {"study_description", "series_description", "modality", "institution_name"}
+
+
+def parse_args(argv: list[str] | None = None) -> argparse.Namespace:
+ parser = argparse.ArgumentParser(
+ description="Convert a CSV of mass transfer filters to JSON.",
+ )
+ parser.add_argument("csv_file", type=Path, help="Path to the input CSV file")
+ parser.add_argument(
+ "-o",
+ "--output",
+ type=Path,
+ default=None,
+ help="Output JSON file path (default: stdout)",
+ )
+ parser.add_argument(
+ "--min-age",
+ type=int,
+ default=None,
+ help="Set a constant min_age for every filter",
+ )
+ parser.add_argument(
+ "--max-age",
+ type=int,
+ default=None,
+ help="Set a constant max_age for every filter",
+ )
+ parser.add_argument(
+ "--min-series-instances",
+ type=int,
+ default=None,
+ help="Set a constant min_number_of_series_related_instances for every filter",
+ )
+ parser.add_argument(
+ "-d",
+ "--delimiter",
+ default=",",
+ help="CSV column delimiter (default: ',')",
+ )
+ return parser.parse_args(argv)
+
+
+def csv_to_filters(
+ csv_path: Path,
+ *,
+ delimiter: str = ",",
+ min_age: int | None = None,
+ max_age: int | None = None,
+ min_number_of_series_related_instances: int | None = None,
+) -> list[dict]:
+ with csv_path.open(newline="", encoding="utf-8-sig") as f:
+ reader = csv.DictReader(f, delimiter=delimiter)
+ if reader.fieldnames is None:
+ raise SystemExit(f"Error: {csv_path} appears to be empty or has no header row.")
+
+ normalised_headers = {h.strip().lower(): h for h in reader.fieldnames}
+ unknown = set(normalised_headers) - KNOWN_COLUMNS - {""}
+ if unknown:
+ print(
+ f"Warning: ignoring unknown columns: {', '.join(sorted(unknown))}",
+ file=sys.stderr,
+ )
+
+ filters: list[dict] = []
+ for row_num, row in enumerate(reader, start=2):
+ entry: dict = {}
+ for col in KNOWN_COLUMNS:
+ original_header = normalised_headers.get(col)
+ if original_header is not None:
+ value = row[original_header].strip()
+ if value:
+ entry[col] = value
+
+ if min_age is not None:
+ entry["min_age"] = min_age
+ if max_age is not None:
+ entry["max_age"] = max_age
+ if min_number_of_series_related_instances is not None:
+ entry["min_number_of_series_related_instances"] = (
+ min_number_of_series_related_instances
+ )
+
+ if not entry:
+ print(f"Warning: skipping empty row {row_num}", file=sys.stderr)
+ continue
+
+ filters.append(entry)
+
+ return filters
+
+
+def main(argv: list[str] | None = None) -> None:
+ args = parse_args(argv)
+
+ if not args.csv_file.exists():
+ raise SystemExit(f"Error: file not found: {args.csv_file}")
+
+ if args.min_age is not None and args.min_age < 0:
+ raise SystemExit("Error: --min-age must be non-negative")
+ if args.max_age is not None and args.max_age < 0:
+ raise SystemExit("Error: --max-age must be non-negative")
+ if (
+ args.min_age is not None
+ and args.max_age is not None
+ and args.min_age > args.max_age
+ ):
+ raise SystemExit(
+ f"Error: --min-age ({args.min_age}) cannot exceed --max-age ({args.max_age})"
+ )
+ if args.min_series_instances is not None and args.min_series_instances < 1:
+ raise SystemExit("Error: --min-series-instances must be at least 1")
+
+ filters = csv_to_filters(
+ args.csv_file,
+ delimiter=args.delimiter,
+ min_age=args.min_age,
+ max_age=args.max_age,
+ min_number_of_series_related_instances=args.min_series_instances,
+ )
+
+ if not filters:
+ raise SystemExit("Error: no valid filter rows found in CSV.")
+
+ output = json.dumps(filters, indent=2, ensure_ascii=False)
+
+ if args.output:
+ args.output.write_text(output + "\n", encoding="utf-8")
+ print(f"Wrote {len(filters)} filter(s) to {args.output}", file=sys.stderr)
+ else:
+ print(output)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/uv.lock b/uv.lock
index cb01e7621..b03a9f73d 100644
--- a/uv.lock
+++ b/uv.lock
@@ -23,6 +23,7 @@ dependencies = [
{ name = "dicomweb-client" },
{ name = "django" },
{ name = "django-block-fragments" },
+ { name = "django-codemirror" },
{ name = "django-cotton" },
{ name = "django-crispy-forms" },
{ name = "django-dbbackup" },
@@ -45,6 +46,7 @@ dependencies = [
{ name = "procrastinate", extra = ["django"] },
{ name = "psycopg", extra = ["binary"] },
{ name = "pyarrow" },
+ { name = "pydantic" },
{ name = "pydicom" },
{ name = "pynetdicom" },
{ name = "stamina" },
@@ -115,6 +117,7 @@ requires-dist = [
{ name = "dicomweb-client", specifier = ">=0.60.0" },
{ name = "django", specifier = ">=5.1.6" },
{ name = "django-block-fragments", specifier = ">=0.1.1" },
+ { name = "django-codemirror", specifier = ">=1.0.1" },
{ name = "django-cotton", specifier = ">=1.6.0" },
{ name = "django-crispy-forms", specifier = ">=2.3" },
{ name = "django-dbbackup", specifier = ">=4.2.1" },
@@ -137,6 +140,7 @@ requires-dist = [
{ name = "procrastinate", extras = ["django"], specifier = ">=3.0.2" },
{ name = "psycopg", extras = ["binary"], specifier = ">=3.2.5" },
{ name = "pyarrow", specifier = ">=19.0.1" },
+ { name = "pydantic", specifier = ">=2.12.5" },
{ name = "pydicom", specifier = ">=2.4.4" },
{ name = "pynetdicom", specifier = ">=2.1.1" },
{ name = "stamina", specifier = ">=24.2.0" },
@@ -268,6 +272,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/bc/8a/340a1555ae33d7354dbca4faa54948d76d89a27ceef032c8c3bc661d003e/aiofiles-25.1.0-py3-none-any.whl", hash = "sha256:abe311e527c862958650f9438e859c1fa7568a141b22abcd015e120e86a85695", size = 14668, upload-time = "2025-10-09T20:51:03.174Z" },
]
+[[package]]
+name = "annotated-types"
+version = "0.7.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081, upload-time = "2024-05-20T21:33:25.928Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload-time = "2024-05-20T21:33:24.1Z" },
+]
+
[[package]]
name = "anyio"
version = "4.12.1"
@@ -900,6 +913,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/48/90/01755e4a42558b763f7021e9369aa6aa94c2ede7313deed56cb7483834ab/django_cache_url-3.4.5-py2.py3-none-any.whl", hash = "sha256:5f350759978483ab85dc0e3e17b3d53eed3394a28148f6bf0f53d11d0feb5b3c", size = 4760, upload-time = "2023-12-04T17:19:44.355Z" },
]
+[[package]]
+name = "django-codemirror"
+version = "1.0.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/74/d6/5d2d4404a64d09851a720c03dd16be6166220f5cd788d4c17338a0b21974/django-codemirror-1.0.1.tar.gz", hash = "sha256:02cff11180922a513324edaf55d66b273ce61e8d66269b49c054e9f4f9f6fbec", size = 1985, upload-time = "2022-04-11T07:14:49.988Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/e9/16/1ebd4e7fb6db30dccebcca37116149880214061e5a4b30f6a969a748b567/django_codemirror-1.0.1-py3-none-any.whl", hash = "sha256:52447f09ddcaca9b7772f5266da92820bfcf2925ea78b4403c29c7261100eca7", size = 3051, upload-time = "2022-04-11T07:14:48.246Z" },
+]
+
[[package]]
name = "django-cotton"
version = "2.6.0"
@@ -2552,6 +2574,92 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/18/3d/f9441a0d798bf2b1e645adc3265e55706aead1255ccdad3856dbdcffec14/pycryptodome-3.23.0-cp37-abi3-win_arm64.whl", hash = "sha256:11eeeb6917903876f134b56ba11abe95c0b0fd5e3330def218083c7d98bbcb3c", size = 1703675, upload-time = "2025-05-17T17:21:13.146Z" },
]
+[[package]]
+name = "pydantic"
+version = "2.12.5"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "annotated-types" },
+ { name = "pydantic-core" },
+ { name = "typing-extensions" },
+ { name = "typing-inspection" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/69/44/36f1a6e523abc58ae5f928898e4aca2e0ea509b5aa6f6f392a5d882be928/pydantic-2.12.5.tar.gz", hash = "sha256:4d351024c75c0f085a9febbb665ce8c0c6ec5d30e903bdb6394b7ede26aebb49", size = 821591, upload-time = "2025-11-26T15:11:46.471Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/5a/87/b70ad306ebb6f9b585f114d0ac2137d792b48be34d732d60e597c2f8465a/pydantic-2.12.5-py3-none-any.whl", hash = "sha256:e561593fccf61e8a20fc46dfc2dfe075b8be7d0188df33f221ad1f0139180f9d", size = 463580, upload-time = "2025-11-26T15:11:44.605Z" },
+]
+
+[[package]]
+name = "pydantic-core"
+version = "2.41.5"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "typing-extensions" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/71/70/23b021c950c2addd24ec408e9ab05d59b035b39d97cdc1130e1bce647bb6/pydantic_core-2.41.5.tar.gz", hash = "sha256:08daa51ea16ad373ffd5e7606252cc32f07bc72b28284b6bc9c6df804816476e", size = 460952, upload-time = "2025-11-04T13:43:49.098Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/5f/5d/5f6c63eebb5afee93bcaae4ce9a898f3373ca23df3ccaef086d0233a35a7/pydantic_core-2.41.5-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f41a7489d32336dbf2199c8c0a215390a751c5b014c2c1c5366e817202e9cdf7", size = 2110990, upload-time = "2025-11-04T13:39:58.079Z" },
+ { url = "https://files.pythonhosted.org/packages/aa/32/9c2e8ccb57c01111e0fd091f236c7b371c1bccea0fa85247ac55b1e2b6b6/pydantic_core-2.41.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:070259a8818988b9a84a449a2a7337c7f430a22acc0859c6b110aa7212a6d9c0", size = 1896003, upload-time = "2025-11-04T13:39:59.956Z" },
+ { url = "https://files.pythonhosted.org/packages/68/b8/a01b53cb0e59139fbc9e4fda3e9724ede8de279097179be4ff31f1abb65a/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e96cea19e34778f8d59fe40775a7a574d95816eb150850a85a7a4c8f4b94ac69", size = 1919200, upload-time = "2025-11-04T13:40:02.241Z" },
+ { url = "https://files.pythonhosted.org/packages/38/de/8c36b5198a29bdaade07b5985e80a233a5ac27137846f3bc2d3b40a47360/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed2e99c456e3fadd05c991f8f437ef902e00eedf34320ba2b0842bd1c3ca3a75", size = 2052578, upload-time = "2025-11-04T13:40:04.401Z" },
+ { url = "https://files.pythonhosted.org/packages/00/b5/0e8e4b5b081eac6cb3dbb7e60a65907549a1ce035a724368c330112adfdd/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65840751b72fbfd82c3c640cff9284545342a4f1eb1586ad0636955b261b0b05", size = 2208504, upload-time = "2025-11-04T13:40:06.072Z" },
+ { url = "https://files.pythonhosted.org/packages/77/56/87a61aad59c7c5b9dc8caad5a41a5545cba3810c3e828708b3d7404f6cef/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e536c98a7626a98feb2d3eaf75944ef6f3dbee447e1f841eae16f2f0a72d8ddc", size = 2335816, upload-time = "2025-11-04T13:40:07.835Z" },
+ { url = "https://files.pythonhosted.org/packages/0d/76/941cc9f73529988688a665a5c0ecff1112b3d95ab48f81db5f7606f522d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eceb81a8d74f9267ef4081e246ffd6d129da5d87e37a77c9bde550cb04870c1c", size = 2075366, upload-time = "2025-11-04T13:40:09.804Z" },
+ { url = "https://files.pythonhosted.org/packages/d3/43/ebef01f69baa07a482844faaa0a591bad1ef129253ffd0cdaa9d8a7f72d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d38548150c39b74aeeb0ce8ee1d8e82696f4a4e16ddc6de7b1d8823f7de4b9b5", size = 2171698, upload-time = "2025-11-04T13:40:12.004Z" },
+ { url = "https://files.pythonhosted.org/packages/b1/87/41f3202e4193e3bacfc2c065fab7706ebe81af46a83d3e27605029c1f5a6/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c23e27686783f60290e36827f9c626e63154b82b116d7fe9adba1fda36da706c", size = 2132603, upload-time = "2025-11-04T13:40:13.868Z" },
+ { url = "https://files.pythonhosted.org/packages/49/7d/4c00df99cb12070b6bccdef4a195255e6020a550d572768d92cc54dba91a/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:482c982f814460eabe1d3bb0adfdc583387bd4691ef00b90575ca0d2b6fe2294", size = 2329591, upload-time = "2025-11-04T13:40:15.672Z" },
+ { url = "https://files.pythonhosted.org/packages/cc/6a/ebf4b1d65d458f3cda6a7335d141305dfa19bdc61140a884d165a8a1bbc7/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:bfea2a5f0b4d8d43adf9d7b8bf019fb46fdd10a2e5cde477fbcb9d1fa08c68e1", size = 2319068, upload-time = "2025-11-04T13:40:17.532Z" },
+ { url = "https://files.pythonhosted.org/packages/49/3b/774f2b5cd4192d5ab75870ce4381fd89cf218af999515baf07e7206753f0/pydantic_core-2.41.5-cp312-cp312-win32.whl", hash = "sha256:b74557b16e390ec12dca509bce9264c3bbd128f8a2c376eaa68003d7f327276d", size = 1985908, upload-time = "2025-11-04T13:40:19.309Z" },
+ { url = "https://files.pythonhosted.org/packages/86/45/00173a033c801cacf67c190fef088789394feaf88a98a7035b0e40d53dc9/pydantic_core-2.41.5-cp312-cp312-win_amd64.whl", hash = "sha256:1962293292865bca8e54702b08a4f26da73adc83dd1fcf26fbc875b35d81c815", size = 2020145, upload-time = "2025-11-04T13:40:21.548Z" },
+ { url = "https://files.pythonhosted.org/packages/f9/22/91fbc821fa6d261b376a3f73809f907cec5ca6025642c463d3488aad22fb/pydantic_core-2.41.5-cp312-cp312-win_arm64.whl", hash = "sha256:1746d4a3d9a794cacae06a5eaaccb4b8643a131d45fbc9af23e353dc0a5ba5c3", size = 1976179, upload-time = "2025-11-04T13:40:23.393Z" },
+ { url = "https://files.pythonhosted.org/packages/87/06/8806241ff1f70d9939f9af039c6c35f2360cf16e93c2ca76f184e76b1564/pydantic_core-2.41.5-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:941103c9be18ac8daf7b7adca8228f8ed6bb7a1849020f643b3a14d15b1924d9", size = 2120403, upload-time = "2025-11-04T13:40:25.248Z" },
+ { url = "https://files.pythonhosted.org/packages/94/02/abfa0e0bda67faa65fef1c84971c7e45928e108fe24333c81f3bfe35d5f5/pydantic_core-2.41.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:112e305c3314f40c93998e567879e887a3160bb8689ef3d2c04b6cc62c33ac34", size = 1896206, upload-time = "2025-11-04T13:40:27.099Z" },
+ { url = "https://files.pythonhosted.org/packages/15/df/a4c740c0943e93e6500f9eb23f4ca7ec9bf71b19e608ae5b579678c8d02f/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cbaad15cb0c90aa221d43c00e77bb33c93e8d36e0bf74760cd00e732d10a6a0", size = 1919307, upload-time = "2025-11-04T13:40:29.806Z" },
+ { url = "https://files.pythonhosted.org/packages/9a/e3/6324802931ae1d123528988e0e86587c2072ac2e5394b4bc2bc34b61ff6e/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:03ca43e12fab6023fc79d28ca6b39b05f794ad08ec2feccc59a339b02f2b3d33", size = 2063258, upload-time = "2025-11-04T13:40:33.544Z" },
+ { url = "https://files.pythonhosted.org/packages/c9/d4/2230d7151d4957dd79c3044ea26346c148c98fbf0ee6ebd41056f2d62ab5/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc799088c08fa04e43144b164feb0c13f9a0bc40503f8df3e9fde58a3c0c101e", size = 2214917, upload-time = "2025-11-04T13:40:35.479Z" },
+ { url = "https://files.pythonhosted.org/packages/e6/9f/eaac5df17a3672fef0081b6c1bb0b82b33ee89aa5cec0d7b05f52fd4a1fa/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:97aeba56665b4c3235a0e52b2c2f5ae9cd071b8a8310ad27bddb3f7fb30e9aa2", size = 2332186, upload-time = "2025-11-04T13:40:37.436Z" },
+ { url = "https://files.pythonhosted.org/packages/cf/4e/35a80cae583a37cf15604b44240e45c05e04e86f9cfd766623149297e971/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:406bf18d345822d6c21366031003612b9c77b3e29ffdb0f612367352aab7d586", size = 2073164, upload-time = "2025-11-04T13:40:40.289Z" },
+ { url = "https://files.pythonhosted.org/packages/bf/e3/f6e262673c6140dd3305d144d032f7bd5f7497d3871c1428521f19f9efa2/pydantic_core-2.41.5-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b93590ae81f7010dbe380cdeab6f515902ebcbefe0b9327cc4804d74e93ae69d", size = 2179146, upload-time = "2025-11-04T13:40:42.809Z" },
+ { url = "https://files.pythonhosted.org/packages/75/c7/20bd7fc05f0c6ea2056a4565c6f36f8968c0924f19b7d97bbfea55780e73/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:01a3d0ab748ee531f4ea6c3e48ad9dac84ddba4b0d82291f87248f2f9de8d740", size = 2137788, upload-time = "2025-11-04T13:40:44.752Z" },
+ { url = "https://files.pythonhosted.org/packages/3a/8d/34318ef985c45196e004bc46c6eab2eda437e744c124ef0dbe1ff2c9d06b/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:6561e94ba9dacc9c61bce40e2d6bdc3bfaa0259d3ff36ace3b1e6901936d2e3e", size = 2340133, upload-time = "2025-11-04T13:40:46.66Z" },
+ { url = "https://files.pythonhosted.org/packages/9c/59/013626bf8c78a5a5d9350d12e7697d3d4de951a75565496abd40ccd46bee/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:915c3d10f81bec3a74fbd4faebe8391013ba61e5a1a8d48c4455b923bdda7858", size = 2324852, upload-time = "2025-11-04T13:40:48.575Z" },
+ { url = "https://files.pythonhosted.org/packages/1a/d9/c248c103856f807ef70c18a4f986693a46a8ffe1602e5d361485da502d20/pydantic_core-2.41.5-cp313-cp313-win32.whl", hash = "sha256:650ae77860b45cfa6e2cdafc42618ceafab3a2d9a3811fcfbd3bbf8ac3c40d36", size = 1994679, upload-time = "2025-11-04T13:40:50.619Z" },
+ { url = "https://files.pythonhosted.org/packages/9e/8b/341991b158ddab181cff136acd2552c9f35bd30380422a639c0671e99a91/pydantic_core-2.41.5-cp313-cp313-win_amd64.whl", hash = "sha256:79ec52ec461e99e13791ec6508c722742ad745571f234ea6255bed38c6480f11", size = 2019766, upload-time = "2025-11-04T13:40:52.631Z" },
+ { url = "https://files.pythonhosted.org/packages/73/7d/f2f9db34af103bea3e09735bb40b021788a5e834c81eedb541991badf8f5/pydantic_core-2.41.5-cp313-cp313-win_arm64.whl", hash = "sha256:3f84d5c1b4ab906093bdc1ff10484838aca54ef08de4afa9de0f5f14d69639cd", size = 1981005, upload-time = "2025-11-04T13:40:54.734Z" },
+ { url = "https://files.pythonhosted.org/packages/ea/28/46b7c5c9635ae96ea0fbb779e271a38129df2550f763937659ee6c5dbc65/pydantic_core-2.41.5-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:3f37a19d7ebcdd20b96485056ba9e8b304e27d9904d233d7b1015db320e51f0a", size = 2119622, upload-time = "2025-11-04T13:40:56.68Z" },
+ { url = "https://files.pythonhosted.org/packages/74/1a/145646e5687e8d9a1e8d09acb278c8535ebe9e972e1f162ed338a622f193/pydantic_core-2.41.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1d1d9764366c73f996edd17abb6d9d7649a7eb690006ab6adbda117717099b14", size = 1891725, upload-time = "2025-11-04T13:40:58.807Z" },
+ { url = "https://files.pythonhosted.org/packages/23/04/e89c29e267b8060b40dca97bfc64a19b2a3cf99018167ea1677d96368273/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25e1c2af0fce638d5f1988b686f3b3ea8cd7de5f244ca147c777769e798a9cd1", size = 1915040, upload-time = "2025-11-04T13:41:00.853Z" },
+ { url = "https://files.pythonhosted.org/packages/84/a3/15a82ac7bd97992a82257f777b3583d3e84bdb06ba6858f745daa2ec8a85/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:506d766a8727beef16b7adaeb8ee6217c64fc813646b424d0804d67c16eddb66", size = 2063691, upload-time = "2025-11-04T13:41:03.504Z" },
+ { url = "https://files.pythonhosted.org/packages/74/9b/0046701313c6ef08c0c1cf0e028c67c770a4e1275ca73131563c5f2a310a/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4819fa52133c9aa3c387b3328f25c1facc356491e6135b459f1de698ff64d869", size = 2213897, upload-time = "2025-11-04T13:41:05.804Z" },
+ { url = "https://files.pythonhosted.org/packages/8a/cd/6bac76ecd1b27e75a95ca3a9a559c643b3afcd2dd62086d4b7a32a18b169/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2b761d210c9ea91feda40d25b4efe82a1707da2ef62901466a42492c028553a2", size = 2333302, upload-time = "2025-11-04T13:41:07.809Z" },
+ { url = "https://files.pythonhosted.org/packages/4c/d2/ef2074dc020dd6e109611a8be4449b98cd25e1b9b8a303c2f0fca2f2bcf7/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22f0fb8c1c583a3b6f24df2470833b40207e907b90c928cc8d3594b76f874375", size = 2064877, upload-time = "2025-11-04T13:41:09.827Z" },
+ { url = "https://files.pythonhosted.org/packages/18/66/e9db17a9a763d72f03de903883c057b2592c09509ccfe468187f2a2eef29/pydantic_core-2.41.5-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2782c870e99878c634505236d81e5443092fba820f0373997ff75f90f68cd553", size = 2180680, upload-time = "2025-11-04T13:41:12.379Z" },
+ { url = "https://files.pythonhosted.org/packages/d3/9e/3ce66cebb929f3ced22be85d4c2399b8e85b622db77dad36b73c5387f8f8/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:0177272f88ab8312479336e1d777f6b124537d47f2123f89cb37e0accea97f90", size = 2138960, upload-time = "2025-11-04T13:41:14.627Z" },
+ { url = "https://files.pythonhosted.org/packages/a6/62/205a998f4327d2079326b01abee48e502ea739d174f0a89295c481a2272e/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:63510af5e38f8955b8ee5687740d6ebf7c2a0886d15a6d65c32814613681bc07", size = 2339102, upload-time = "2025-11-04T13:41:16.868Z" },
+ { url = "https://files.pythonhosted.org/packages/3c/0d/f05e79471e889d74d3d88f5bd20d0ed189ad94c2423d81ff8d0000aab4ff/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:e56ba91f47764cc14f1daacd723e3e82d1a89d783f0f5afe9c364b8bb491ccdb", size = 2326039, upload-time = "2025-11-04T13:41:18.934Z" },
+ { url = "https://files.pythonhosted.org/packages/ec/e1/e08a6208bb100da7e0c4b288eed624a703f4d129bde2da475721a80cab32/pydantic_core-2.41.5-cp314-cp314-win32.whl", hash = "sha256:aec5cf2fd867b4ff45b9959f8b20ea3993fc93e63c7363fe6851424c8a7e7c23", size = 1995126, upload-time = "2025-11-04T13:41:21.418Z" },
+ { url = "https://files.pythonhosted.org/packages/48/5d/56ba7b24e9557f99c9237e29f5c09913c81eeb2f3217e40e922353668092/pydantic_core-2.41.5-cp314-cp314-win_amd64.whl", hash = "sha256:8e7c86f27c585ef37c35e56a96363ab8de4e549a95512445b85c96d3e2f7c1bf", size = 2015489, upload-time = "2025-11-04T13:41:24.076Z" },
+ { url = "https://files.pythonhosted.org/packages/4e/bb/f7a190991ec9e3e0ba22e4993d8755bbc4a32925c0b5b42775c03e8148f9/pydantic_core-2.41.5-cp314-cp314-win_arm64.whl", hash = "sha256:e672ba74fbc2dc8eea59fb6d4aed6845e6905fc2a8afe93175d94a83ba2a01a0", size = 1977288, upload-time = "2025-11-04T13:41:26.33Z" },
+ { url = "https://files.pythonhosted.org/packages/92/ed/77542d0c51538e32e15afe7899d79efce4b81eee631d99850edc2f5e9349/pydantic_core-2.41.5-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:8566def80554c3faa0e65ac30ab0932b9e3a5cd7f8323764303d468e5c37595a", size = 2120255, upload-time = "2025-11-04T13:41:28.569Z" },
+ { url = "https://files.pythonhosted.org/packages/bb/3d/6913dde84d5be21e284439676168b28d8bbba5600d838b9dca99de0fad71/pydantic_core-2.41.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b80aa5095cd3109962a298ce14110ae16b8c1aece8b72f9dafe81cf597ad80b3", size = 1863760, upload-time = "2025-11-04T13:41:31.055Z" },
+ { url = "https://files.pythonhosted.org/packages/5a/f0/e5e6b99d4191da102f2b0eb9687aaa7f5bea5d9964071a84effc3e40f997/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3006c3dd9ba34b0c094c544c6006cc79e87d8612999f1a5d43b769b89181f23c", size = 1878092, upload-time = "2025-11-04T13:41:33.21Z" },
+ { url = "https://files.pythonhosted.org/packages/71/48/36fb760642d568925953bcc8116455513d6e34c4beaa37544118c36aba6d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:72f6c8b11857a856bcfa48c86f5368439f74453563f951e473514579d44aa612", size = 2053385, upload-time = "2025-11-04T13:41:35.508Z" },
+ { url = "https://files.pythonhosted.org/packages/20/25/92dc684dd8eb75a234bc1c764b4210cf2646479d54b47bf46061657292a8/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5cb1b2f9742240e4bb26b652a5aeb840aa4b417c7748b6f8387927bc6e45e40d", size = 2218832, upload-time = "2025-11-04T13:41:37.732Z" },
+ { url = "https://files.pythonhosted.org/packages/e2/09/f53e0b05023d3e30357d82eb35835d0f6340ca344720a4599cd663dca599/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd3d54f38609ff308209bd43acea66061494157703364ae40c951f83ba99a1a9", size = 2327585, upload-time = "2025-11-04T13:41:40Z" },
+ { url = "https://files.pythonhosted.org/packages/aa/4e/2ae1aa85d6af35a39b236b1b1641de73f5a6ac4d5a7509f77b814885760c/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ff4321e56e879ee8d2a879501c8e469414d948f4aba74a2d4593184eb326660", size = 2041078, upload-time = "2025-11-04T13:41:42.323Z" },
+ { url = "https://files.pythonhosted.org/packages/cd/13/2e215f17f0ef326fc72afe94776edb77525142c693767fc347ed6288728d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d0d2568a8c11bf8225044aa94409e21da0cb09dcdafe9ecd10250b2baad531a9", size = 2173914, upload-time = "2025-11-04T13:41:45.221Z" },
+ { url = "https://files.pythonhosted.org/packages/02/7a/f999a6dcbcd0e5660bc348a3991c8915ce6599f4f2c6ac22f01d7a10816c/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:a39455728aabd58ceabb03c90e12f71fd30fa69615760a075b9fec596456ccc3", size = 2129560, upload-time = "2025-11-04T13:41:47.474Z" },
+ { url = "https://files.pythonhosted.org/packages/3a/b1/6c990ac65e3b4c079a4fb9f5b05f5b013afa0f4ed6780a3dd236d2cbdc64/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_armv7l.whl", hash = "sha256:239edca560d05757817c13dc17c50766136d21f7cd0fac50295499ae24f90fdf", size = 2329244, upload-time = "2025-11-04T13:41:49.992Z" },
+ { url = "https://files.pythonhosted.org/packages/d9/02/3c562f3a51afd4d88fff8dffb1771b30cfdfd79befd9883ee094f5b6c0d8/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:2a5e06546e19f24c6a96a129142a75cee553cc018ffee48a460059b1185f4470", size = 2331955, upload-time = "2025-11-04T13:41:54.079Z" },
+ { url = "https://files.pythonhosted.org/packages/5c/96/5fb7d8c3c17bc8c62fdb031c47d77a1af698f1d7a406b0f79aaa1338f9ad/pydantic_core-2.41.5-cp314-cp314t-win32.whl", hash = "sha256:b4ececa40ac28afa90871c2cc2b9ffd2ff0bf749380fbdf57d165fd23da353aa", size = 1988906, upload-time = "2025-11-04T13:41:56.606Z" },
+ { url = "https://files.pythonhosted.org/packages/22/ed/182129d83032702912c2e2d8bbe33c036f342cc735737064668585dac28f/pydantic_core-2.41.5-cp314-cp314t-win_amd64.whl", hash = "sha256:80aa89cad80b32a912a65332f64a4450ed00966111b6615ca6816153d3585a8c", size = 1981607, upload-time = "2025-11-04T13:41:58.889Z" },
+ { url = "https://files.pythonhosted.org/packages/9f/ed/068e41660b832bb0b1aa5b58011dea2a3fe0ba7861ff38c4d4904c1c1a99/pydantic_core-2.41.5-cp314-cp314t-win_arm64.whl", hash = "sha256:35b44f37a3199f771c3eaa53051bc8a70cd7b54f333531c59e29fd4db5d15008", size = 1974769, upload-time = "2025-11-04T13:42:01.186Z" },
+ { url = "https://files.pythonhosted.org/packages/09/32/59b0c7e63e277fa7911c2fc70ccfb45ce4b98991e7ef37110663437005af/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:7da7087d756b19037bc2c06edc6c170eeef3c3bafcb8f532ff17d64dc427adfd", size = 2110495, upload-time = "2025-11-04T13:42:49.689Z" },
+ { url = "https://files.pythonhosted.org/packages/aa/81/05e400037eaf55ad400bcd318c05bb345b57e708887f07ddb2d20e3f0e98/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:aabf5777b5c8ca26f7824cb4a120a740c9588ed58df9b2d196ce92fba42ff8dc", size = 1915388, upload-time = "2025-11-04T13:42:52.215Z" },
+ { url = "https://files.pythonhosted.org/packages/6e/0d/e3549b2399f71d56476b77dbf3cf8937cec5cd70536bdc0e374a421d0599/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c007fe8a43d43b3969e8469004e9845944f1a80e6acd47c150856bb87f230c56", size = 1942879, upload-time = "2025-11-04T13:42:56.483Z" },
+ { url = "https://files.pythonhosted.org/packages/f7/07/34573da085946b6a313d7c42f82f16e8920bfd730665de2d11c0c37a74b5/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76d0819de158cd855d1cbb8fcafdf6f5cf1eb8e470abe056d5d161106e38062b", size = 2139017, upload-time = "2025-11-04T13:42:59.471Z" },
+]
+
[[package]]
name = "pydicom"
version = "3.0.2"
@@ -3355,6 +3463,18 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" },
]

+[[package]]
+name = "typing-inspection"
+version = "0.4.2"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "typing-extensions" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/55/e3/70399cb7dd41c10ac53367ae42139cf4b1ca5f36bb3dc6c9d33acdb43655/typing_inspection-0.4.2.tar.gz", hash = "sha256:ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464", size = 75949, upload-time = "2025-10-01T02:14:41.687Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7", size = 14611, upload-time = "2025-10-01T02:14:40.154Z" },
+]
+
[[package]]
name = "tzdata"
version = "2025.3"